import os
import importlib.util
from comfy.cli_args import args
import subprocess


def get_gpu_names():
    if os.name == 'nt':
        import ctypes

        class DISPLAY_DEVICEA(ctypes.Structure):
            _fields_ = [
                ('cb', ctypes.c_ulong),
                ('DeviceName', ctypes.c_char * 32),
                ('DeviceString', ctypes.c_char * 128),
                ('StateFlags', ctypes.c_ulong),
                ('DeviceID', ctypes.c_char * 128),
                ('DeviceKey', ctypes.c_char * 128)
            ]

        user32 = ctypes.windll.user32

        def enum_display_devices():
            device_info = DISPLAY_DEVICEA()
            device_info.cb = ctypes.sizeof(device_info)
            device_index = 0
            gpu_names = set()

            while user32.EnumDisplayDevicesA(None, device_index, ctypes.byref(device_info), 0):
                device_index += 1
                gpu_names.add(device_info.DeviceString.decode('utf-8'))
            return gpu_names
        return enum_display_devices()
    else:
        gpu_names = set()
        out = subprocess.check_output(['nvidia-smi', '-L'])
        for l in out.split(b'\n'):
            if len(l) > 0:
                gpu_names.add(l.decode('utf-8').split(' (UUID')[0])
        return gpu_names


blacklist = {"GeForce GTX TITAN X", "GeForce GTX 980", "GeForce GTX 970", "GeForce GTX 960", "GeForce GTX 950", "GeForce 945M",
             "GeForce 940M", "GeForce 930M", "GeForce 920M", "GeForce 910M", "GeForce GTX 750", "GeForce GTX 745", "Quadro K620",
             "Quadro K1200", "Quadro K2200", "Quadro M500", "Quadro M520", "Quadro M600", "Quadro M620", "Quadro M1000",
             "Quadro M1200", "Quadro M2000", "Quadro M2200", "Quadro M3000", "Quadro M4000", "Quadro M5000", "Quadro M5500",
             "Quadro M6000", "GeForce MX110", "GeForce MX130", "GeForce 830M", "GeForce 840M", "GeForce GTX 850M",
             "GeForce GTX 860M", "GeForce GTX 1650", "GeForce GTX 1630", "Tesla M4", "Tesla M6", "Tesla M10", "Tesla M40",
             "Tesla M60"
             }


def cuda_malloc_supported():
    try:
        names = get_gpu_names()
    except:
        names = set()
    for x in names:
        if "NVIDIA" in x:
            for b in blacklist:
                if b in x:
                    return False
    return True


if not args.cuda_malloc:
    try:
        version = ""
        torch_spec = importlib.util.find_spec("torch")
        for folder in torch_spec.submodule_search_locations:
            ver_file = os.path.join(folder, "version.py")
            if os.path.isfile(ver_file):
                spec = importlib.util.spec_from_file_location("torch_version_import", ver_file)
                module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(module)
                version = module.__version__
        if int(version[0]) >= 2:
            args.cuda_malloc = cuda_malloc_supported()
    except:
        pass


if args.cuda_malloc and not args.disable_cuda_malloc:
    env_var = os.environ.get('PYTORCH_CUDA_ALLOC_CONF', None)
    if env_var is None:
        env_var = "backend:cudaMallocAsync"
    else:
        env_var += ",backend:cudaMallocAsync"
    os.environ['PYTORCH_CUDA_ALLOC_CONF'] = env_var
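As a side note, a minimal sketch of how one could confirm that the async allocator requested above actually took effect, assuming a CUDA build of PyTorch 2.x (torch.cuda.get_allocator_backend may not exist on older releases):

import os
# cuda_malloc.py sets this before torch is ever imported; mirror that ordering here
os.environ.setdefault('PYTORCH_CUDA_ALLOC_CONF', 'backend:cudaMallocAsync')

import torch

if torch.cuda.is_available():
    # expected to print "cudaMallocAsync" when the backend took effect, "native" otherwise
    print(torch.cuda.get_allocator_backend())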
import sys import copy import logging import threading import heapq import traceback import inspect from typing import List, Literal, NamedTuple, Optional import torch import nodes import comfy.model_management def get_input_data(inputs, class_def, unique_id, outputs={}, prompt={}, extra_data={}): valid_inputs = class_def.INPUT_TYPES() input_data_all = {} for x in inputs: input_data = inputs[x] if isinstance(input_data, list): input_unique_id = input_data[0] output_index = input_data[1] if input_unique_id not in outputs: input_data_all[x] = (None,) continue obj = outputs[input_unique_id][output_index] input_data_all[x] = obj else: if ("required" in valid_inputs and x in valid_inputs["required"]) or ("optional" in valid_inputs and x in valid_inputs["optional"]): input_data_all[x] = [input_data] if "hidden" in valid_inputs: h = valid_inputs["hidden"] for x in h: if h[x] == "PROMPT": input_data_all[x] = [prompt] if h[x] == "EXTRA_PNGINFO": input_data_all[x] = [extra_data.get('extra_pnginfo', None)] if h[x] == "UNIQUE_ID": input_data_all[x] = [unique_id] return input_data_all def map_node_over_list(obj, input_data_all, func, allow_interrupt=False): input_is_list = False if hasattr(obj, "INPUT_IS_LIST"): input_is_list = obj.INPUT_IS_LIST if len(input_data_all) == 0: max_len_input = 0 else: max_len_input = max([len(x) for x in input_data_all.values()]) def slice_dict(d, i): d_new = dict() for k,v in d.items(): d_new[k] = v[i if len(v) > i else -1] return d_new results = [] if input_is_list: if allow_interrupt: nodes.before_node_execution() results.append(getattr(obj, func)(**input_data_all)) elif max_len_input == 0: if allow_interrupt: nodes.before_node_execution() results.append(getattr(obj, func)()) else: for i in range(max_len_input): if allow_interrupt: nodes.before_node_execution() results.append(getattr(obj, func)(**slice_dict(input_data_all, i))) return results def get_output_data(obj, input_data_all): results = [] uis = [] return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True) for r in return_values: if isinstance(r, dict): if 'ui' in r: uis.append(r['ui']) if 'result' in r: results.append(r['result']) else: results.append(r) output = [] if len(results) > 0: output_is_list = [False] * len(results[0]) if hasattr(obj, "OUTPUT_IS_LIST"): output_is_list = obj.OUTPUT_IS_LIST for i, is_list in zip(range(len(results[0])), output_is_list): if is_list: output.append([x for o in results for x in o[i]]) else: output.append([o[i] for o in results]) ui = dict() if len(uis) > 0: ui = {k: [y for x in uis for y in x[k]] for k in uis[0].keys()} return output, ui def format_value(x): if x is None: return None elif isinstance(x, (int, float, bool, str)): return x else: return str(x) def recursive_execute(server, prompt, outputs, current_item, extra_data, executed, prompt_id, outputs_ui, object_storage): unique_id = current_item inputs = prompt[unique_id]['inputs'] class_type = prompt[unique_id]['class_type'] class_def = nodes.NODE_CLASS_MAPPINGS[class_type] if unique_id in outputs: return (True, None, None) for x in inputs: input_data = inputs[x] if isinstance(input_data, list): input_unique_id = input_data[0] output_index = input_data[1] if input_unique_id not in outputs: result = recursive_execute(server, prompt, outputs, input_unique_id, extra_data, executed, prompt_id, outputs_ui, object_storage) if result[0] is not True: return result input_data_all = None try: input_data_all = get_input_data(inputs, class_def, unique_id, outputs, prompt, extra_data) if 
server.client_id is not None: server.last_node_id = unique_id server.send_sync("executing", { "node": unique_id, "prompt_id": prompt_id }, server.client_id) obj = object_storage.get((unique_id, class_type), None) if obj is None: obj = class_def() object_storage[(unique_id, class_type)] = obj output_data, output_ui = get_output_data(obj, input_data_all) outputs[unique_id] = output_data if len(output_ui) > 0: outputs_ui[unique_id] = output_ui if server.client_id is not None: server.send_sync("executed", { "node": unique_id, "output": output_ui, "prompt_id": prompt_id }, server.client_id) except comfy.model_management.InterruptProcessingException as iex: logging.info("Processing interrupted") error_details = { "node_id": unique_id, } return (False, error_details, iex) except Exception as ex: typ, _, tb = sys.exc_info() exception_type = full_type_name(typ) input_data_formatted = {} if input_data_all is not None: input_data_formatted = {} for name, inputs in input_data_all.items(): input_data_formatted[name] = [format_value(x) for x in inputs] output_data_formatted = {} for node_id, node_outputs in outputs.items(): output_data_formatted[node_id] = [[format_value(x) for x in l] for l in node_outputs] logging.error(f"!!! Exception during processing!!! {ex}") logging.error(traceback.format_exc()) error_details = { "node_id": unique_id, "exception_message": str(ex), "exception_type": exception_type, "traceback": traceback.format_tb(tb), "current_inputs": input_data_formatted, "current_outputs": output_data_formatted } return (False, error_details, ex) executed.add(unique_id) return (True, None, None) def recursive_will_execute(prompt, outputs, current_item, memo={}): unique_id = current_item if unique_id in memo: return memo[unique_id] inputs = prompt[unique_id]['inputs'] will_execute = [] if unique_id in outputs: return [] for x in inputs: input_data = inputs[x] if isinstance(input_data, list): input_unique_id = input_data[0] output_index = input_data[1] if input_unique_id not in outputs: will_execute += recursive_will_execute(prompt, outputs, input_unique_id, memo) memo[unique_id] = will_execute + [unique_id] return memo[unique_id] def recursive_output_delete_if_changed(prompt, old_prompt, outputs, current_item): unique_id = current_item inputs = prompt[unique_id]['inputs'] class_type = prompt[unique_id]['class_type'] class_def = nodes.NODE_CLASS_MAPPINGS[class_type] is_changed_old = '' is_changed = '' to_delete = False if hasattr(class_def, 'IS_CHANGED'): if unique_id in old_prompt and 'is_changed' in old_prompt[unique_id]: is_changed_old = old_prompt[unique_id]['is_changed'] if 'is_changed' not in prompt[unique_id]: input_data_all = get_input_data(inputs, class_def, unique_id, outputs) if input_data_all is not None: try: is_changed = map_node_over_list(class_def, input_data_all, "IS_CHANGED") prompt[unique_id]['is_changed'] = is_changed except: to_delete = True else: is_changed = prompt[unique_id]['is_changed'] if unique_id not in outputs: return True if not to_delete: if is_changed != is_changed_old: to_delete = True elif unique_id not in old_prompt: to_delete = True elif inputs == old_prompt[unique_id]['inputs']: for x in inputs: input_data = inputs[x] if isinstance(input_data, list): input_unique_id = input_data[0] output_index = input_data[1] if input_unique_id in outputs: to_delete = recursive_output_delete_if_changed(prompt, old_prompt, outputs, input_unique_id) else: to_delete = True if to_delete: break else: to_delete = True if to_delete: d = outputs.pop(unique_id) del d return 
to_delete class PromptExecutor: def __init__(self, server): self.server = server self.reset() def reset(self): self.outputs = {} self.object_storage = {} self.outputs_ui = {} self.status_messages = [] self.success = True self.old_prompt = {} def add_message(self, event, data, broadcast: bool): self.status_messages.append((event, data)) if self.server.client_id is not None or broadcast: self.server.send_sync(event, data, self.server.client_id) def handle_execution_error(self, prompt_id, prompt, current_outputs, executed, error, ex): node_id = error["node_id"] class_type = prompt[node_id]["class_type"] if isinstance(ex, comfy.model_management.InterruptProcessingException): mes = { "prompt_id": prompt_id, "node_id": node_id, "node_type": class_type, "executed": list(executed), } self.add_message("execution_interrupted", mes, broadcast=True) else: mes = { "prompt_id": prompt_id, "node_id": node_id, "node_type": class_type, "executed": list(executed), "exception_message": error["exception_message"], "exception_type": error["exception_type"], "traceback": error["traceback"], "current_inputs": error["current_inputs"], "current_outputs": error["current_outputs"], } self.add_message("execution_error", mes, broadcast=False) to_delete = [] for o in self.outputs: if (o not in current_outputs) and (o not in executed): to_delete += [o] if o in self.old_prompt: d = self.old_prompt.pop(o) del d for o in to_delete: d = self.outputs.pop(o) del d def execute(self, prompt, prompt_id, extra_data={}, execute_outputs=[]): nodes.interrupt_processing(False) if "client_id" in extra_data: self.server.client_id = extra_data["client_id"] else: self.server.client_id = None self.status_messages = [] self.add_message("execution_start", { "prompt_id": prompt_id}, broadcast=False) with torch.inference_mode(): to_delete = [] for o in self.outputs: if o not in prompt: to_delete += [o] for o in to_delete: d = self.outputs.pop(o) del d to_delete = [] for o in self.object_storage: if o[0] not in prompt: to_delete += [o] else: p = prompt[o[0]] if o[1] != p['class_type']: to_delete += [o] for o in to_delete: d = self.object_storage.pop(o) del d for x in prompt: recursive_output_delete_if_changed(prompt, self.old_prompt, self.outputs, x) current_outputs = set(self.outputs.keys()) for x in list(self.outputs_ui.keys()): if x not in current_outputs: d = self.outputs_ui.pop(x) del d comfy.model_management.cleanup_models(keep_clone_weights_loaded=True) self.add_message("execution_cached", { "nodes": list(current_outputs) , "prompt_id": prompt_id}, broadcast=False) executed = set() output_node_id = None to_execute = [] for node_id in list(execute_outputs): to_execute += [(0, node_id)] while len(to_execute) > 0: memo = {} to_execute = sorted(list(map(lambda a: (len(recursive_will_execute(prompt, self.outputs, a[-1], memo)), a[-1]), to_execute))) output_node_id = to_execute.pop(0)[-1] self.success, error, ex = recursive_execute(self.server, prompt, self.outputs, output_node_id, extra_data, executed, prompt_id, self.outputs_ui, self.object_storage) if self.success is not True: self.handle_execution_error(prompt_id, prompt, current_outputs, executed, error, ex) break for x in executed: self.old_prompt[x] = copy.deepcopy(prompt[x]) self.server.last_node_id = None if comfy.model_management.DISABLE_SMART_MEMORY: comfy.model_management.unload_all_models() def validate_inputs(prompt, item, validated): unique_id = item if unique_id in validated: return validated[unique_id] inputs = prompt[unique_id]['inputs'] class_type = 
prompt[unique_id]['class_type'] obj_class = nodes.NODE_CLASS_MAPPINGS[class_type] class_inputs = obj_class.INPUT_TYPES() required_inputs = class_inputs['required'] errors = [] valid = True validate_function_inputs = [] if hasattr(obj_class, "VALIDATE_INPUTS"): validate_function_inputs = inspect.getfullargspec(obj_class.VALIDATE_INPUTS).args for x in required_inputs: if x not in inputs: error = { "type": "required_input_missing", "message": "Required input is missing", "details": f"{x}", "extra_info": { "input_name": x } } errors.append(error) continue val = inputs[x] info = required_inputs[x] type_input = info[0] if isinstance(val, list): if len(val) != 2: error = { "type": "bad_linked_input", "message": "Bad linked input, must be a length-2 list of [node_id, slot_index]", "details": f"{x}", "extra_info": { "input_name": x, "input_config": info, "received_value": val } } errors.append(error) continue o_id = val[0] o_class_type = prompt[o_id]['class_type'] r = nodes.NODE_CLASS_MAPPINGS[o_class_type].RETURN_TYPES if r[val[1]] != type_input: received_type = r[val[1]] details = f"{x}, {received_type} != {type_input}" error = { "type": "return_type_mismatch", "message": "Return type mismatch between linked nodes", "details": details, "extra_info": { "input_name": x, "input_config": info, "received_type": received_type, "linked_node": val } } errors.append(error) continue try: r = validate_inputs(prompt, o_id, validated) if r[0] is False: valid = False continue except Exception as ex: typ, _, tb = sys.exc_info() valid = False exception_type = full_type_name(typ) reasons = [{ "type": "exception_during_inner_validation", "message": "Exception when validating inner node", "details": str(ex), "extra_info": { "input_name": x, "input_config": info, "exception_message": str(ex), "exception_type": exception_type, "traceback": traceback.format_tb(tb), "linked_node": val } }] validated[o_id] = (False, reasons, o_id) continue else: try: if type_input == "INT": val = int(val) inputs[x] = val if type_input == "FLOAT": val = float(val) inputs[x] = val if type_input == "STRING": val = str(val) inputs[x] = val except Exception as ex: error = { "type": "invalid_input_type", "message": f"Failed to convert an input value to a {type_input} value", "details": f"{x}, {val}, {ex}", "extra_info": { "input_name": x, "input_config": info, "received_value": val, "exception_message": str(ex) } } errors.append(error) continue if len(info) > 1: if "min" in info[1] and val < info[1]["min"]: error = { "type": "value_smaller_than_min", "message": "Value {} smaller than min of {}".format(val, info[1]["min"]), "details": f"{x}", "extra_info": { "input_name": x, "input_config": info, "received_value": val, } } errors.append(error) continue if "max" in info[1] and val > info[1]["max"]: error = { "type": "value_bigger_than_max", "message": "Value {} bigger than max of {}".format(val, info[1]["max"]), "details": f"{x}", "extra_info": { "input_name": x, "input_config": info, "received_value": val, } } errors.append(error) continue if x not in validate_function_inputs: if isinstance(type_input, list): if val not in type_input: input_config = info list_info = "" if len(type_input) > 20: list_info = f"(list of length {len(type_input)})" input_config = None else: list_info = str(type_input) error = { "type": "value_not_in_list", "message": "Value not in list", "details": f"{x}: '{val}' not in {list_info}", "extra_info": { "input_name": x, "input_config": input_config, "received_value": val, } } errors.append(error) continue if 
len(validate_function_inputs) > 0: input_data_all = get_input_data(inputs, obj_class, unique_id) input_filtered = {} for x in input_data_all: if x in validate_function_inputs: input_filtered[x] = input_data_all[x] ret = map_node_over_list(obj_class, input_filtered, "VALIDATE_INPUTS") for x in input_filtered: for i, r in enumerate(ret): if r is not True: details = f"{x}" if r is not False: details += f" - {str(r)}" error = { "type": "custom_validation_failed", "message": "Custom validation failed for node", "details": details, "extra_info": { "input_name": x, "input_config": info, "received_value": val, } } errors.append(error) continue if len(errors) > 0 or valid is not True: ret = (False, errors, unique_id) else: ret = (True, [], unique_id) validated[unique_id] = ret return ret def full_type_name(klass): module = klass.__module__ if module == 'builtins': return klass.__qualname__ return module + '.' + klass.__qualname__ def validate_prompt(prompt): outputs = set() for x in prompt: if 'class_type' not in prompt[x]: error = { "type": "invalid_prompt", "message": f"Cannot execute because a node is missing the class_type property.", "details": f"Node ID ' "extra_info": {} } return (False, error, [], []) class_type = prompt[x]['class_type'] class_ = nodes.NODE_CLASS_MAPPINGS.get(class_type, None) if class_ is None: error = { "type": "invalid_prompt", "message": f"Cannot execute because node {class_type} does not exist.", "details": f"Node ID ' "extra_info": {} } return (False, error, [], []) if hasattr(class_, 'OUTPUT_NODE') and class_.OUTPUT_NODE is True: outputs.add(x) if len(outputs) == 0: error = { "type": "prompt_no_outputs", "message": "Prompt has no outputs", "details": "", "extra_info": {} } return (False, error, [], []) good_outputs = set() errors = [] node_errors = {} validated = {} for o in outputs: valid = False reasons = [] try: m = validate_inputs(prompt, o, validated) valid = m[0] reasons = m[1] except Exception as ex: typ, _, tb = sys.exc_info() valid = False exception_type = full_type_name(typ) reasons = [{ "type": "exception_during_validation", "message": "Exception when validating node", "details": str(ex), "extra_info": { "exception_type": exception_type, "traceback": traceback.format_tb(tb) } }] validated[o] = (False, reasons, o) if valid is True: good_outputs.add(o) else: logging.error(f"Failed to validate prompt for output {o}:") if len(reasons) > 0: logging.error("* (prompt):") for reason in reasons: logging.error(f" - {reason['message']}: {reason['details']}") errors += [(o, reasons)] for node_id, result in validated.items(): valid = result[0] reasons = result[1] if valid is not True and len(reasons) > 0: if node_id not in node_errors: class_type = prompt[node_id]['class_type'] node_errors[node_id] = { "errors": reasons, "dependent_outputs": [], "class_type": class_type } logging.error(f"* {class_type} {node_id}:") for reason in reasons: logging.error(f" - {reason['message']}: {reason['details']}") node_errors[node_id]["dependent_outputs"].append(o) logging.error("Output will be ignored") if len(good_outputs) == 0: errors_list = [] for o, errors in errors: for error in errors: errors_list.append(f"{error['message']}: {error['details']}") errors_list = "\n".join(errors_list) error = { "type": "prompt_outputs_failed_validation", "message": "Prompt outputs failed validation", "details": errors_list, "extra_info": {} } return (False, error, list(good_outputs), node_errors) return (True, None, list(good_outputs), node_errors) MAXIMUM_HISTORY_SIZE = 10000 class PromptQueue: 
def __init__(self, server): self.server = server self.mutex = threading.RLock() self.not_empty = threading.Condition(self.mutex) self.task_counter = 0 self.queue = [] self.currently_running = {} self.history = {} self.flags = {} server.prompt_queue = self def put(self, item): with self.mutex: heapq.heappush(self.queue, item) self.server.queue_updated() self.not_empty.notify() def get(self, timeout=None): with self.not_empty: while len(self.queue) == 0: self.not_empty.wait(timeout=timeout) if timeout is not None and len(self.queue) == 0: return None item = heapq.heappop(self.queue) i = self.task_counter self.currently_running[i] = copy.deepcopy(item) self.task_counter += 1 self.server.queue_updated() return (item, i) class ExecutionStatus(NamedTuple): status_str: Literal['success', 'error'] completed: bool messages: List[str] def task_done(self, item_id, outputs, status: Optional['PromptQueue.ExecutionStatus']): with self.mutex: prompt = self.currently_running.pop(item_id) if len(self.history) > MAXIMUM_HISTORY_SIZE: self.history.pop(next(iter(self.history))) status_dict: Optional[dict] = None if status is not None: status_dict = copy.deepcopy(status._asdict()) self.history[prompt[1]] = { "prompt": prompt, "outputs": copy.deepcopy(outputs), 'status': status_dict, } self.server.queue_updated() def get_current_queue(self): with self.mutex: out = [] for x in self.currently_running.values(): out += [x] return (out, copy.deepcopy(self.queue)) def get_tasks_remaining(self): with self.mutex: return len(self.queue) + len(self.currently_running) def wipe_queue(self): with self.mutex: self.queue = [] self.server.queue_updated() def delete_queue_item(self, function): with self.mutex: for x in range(len(self.queue)): if function(self.queue[x]): if len(self.queue) == 1: self.wipe_queue() else: self.queue.pop(x) heapq.heapify(self.queue) self.server.queue_updated() return True return False def get_history(self, prompt_id=None, max_items=None, offset=-1): with self.mutex: if prompt_id is None: out = {} i = 0 if offset < 0 and max_items is not None: offset = len(self.history) - max_items for k in self.history: if i >= offset: out[k] = self.history[k] if max_items is not None and len(out) >= max_items: break i += 1 return out elif prompt_id in self.history: return {prompt_id: copy.deepcopy(self.history[prompt_id])} else: return {} def wipe_history(self): with self.mutex: self.history = {} def delete_history_item(self, id_to_delete): with self.mutex: self.history.pop(id_to_delete, None) def set_flag(self, name, data): with self.mutex: self.flags[name] = data self.not_empty.notify() def get_flags(self, reset=True): with self.mutex: if reset: ret = self.flags self.flags = {} return ret else: return self.flags.copy()
import os import time import logging supported_pt_extensions = set(['.ckpt', '.pt', '.bin', '.pth', '.safetensors', '.pkl']) folder_names_and_paths = {} base_path = os.path.dirname(os.path.realpath(__file__)) models_dir = os.path.join(base_path, "models") folder_names_and_paths["checkpoints"] = ([os.path.join(models_dir, "checkpoints")], supported_pt_extensions) folder_names_and_paths["configs"] = ([os.path.join(models_dir, "configs")], [".yaml"]) folder_names_and_paths["loras"] = ([os.path.join(models_dir, "loras")], supported_pt_extensions) folder_names_and_paths["vae"] = ([os.path.join(models_dir, "vae")], supported_pt_extensions) folder_names_and_paths["clip"] = ([os.path.join(models_dir, "clip")], supported_pt_extensions) folder_names_and_paths["unet"] = ([os.path.join(models_dir, "unet")], supported_pt_extensions) folder_names_and_paths["clip_vision"] = ([os.path.join(models_dir, "clip_vision")], supported_pt_extensions) folder_names_and_paths["style_models"] = ([os.path.join(models_dir, "style_models")], supported_pt_extensions) folder_names_and_paths["embeddings"] = ([os.path.join(models_dir, "embeddings")], supported_pt_extensions) folder_names_and_paths["diffusers"] = ([os.path.join(models_dir, "diffusers")], ["folder"]) folder_names_and_paths["vae_approx"] = ([os.path.join(models_dir, "vae_approx")], supported_pt_extensions) folder_names_and_paths["controlnet"] = ([os.path.join(models_dir, "controlnet"), os.path.join(models_dir, "t2i_adapter")], supported_pt_extensions) folder_names_and_paths["gligen"] = ([os.path.join(models_dir, "gligen")], supported_pt_extensions) folder_names_and_paths["upscale_models"] = ([os.path.join(models_dir, "upscale_models")], supported_pt_extensions) folder_names_and_paths["custom_nodes"] = ([os.path.join(base_path, "custom_nodes")], []) folder_names_and_paths["hypernetworks"] = ([os.path.join(models_dir, "hypernetworks")], supported_pt_extensions) folder_names_and_paths["photomaker"] = ([os.path.join(models_dir, "photomaker")], supported_pt_extensions) folder_names_and_paths["classifiers"] = ([os.path.join(models_dir, "classifiers")], {""}) output_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), "output") temp_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), "temp") input_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), "input") user_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), "user") filename_list_cache = {} if not os.path.exists(input_directory): try: os.makedirs(input_directory) except: logging.error("Failed to create input directory") def set_output_directory(output_dir): global output_directory output_directory = output_dir def set_temp_directory(temp_dir): global temp_directory temp_directory = temp_dir def set_input_directory(input_dir): global input_directory input_directory = input_dir def get_output_directory(): global output_directory return output_directory def get_temp_directory(): global temp_directory return temp_directory def get_input_directory(): global input_directory return input_directory def get_directory_by_type(type_name): if type_name == "output": return get_output_directory() if type_name == "temp": return get_temp_directory() if type_name == "input": return get_input_directory() return None def annotated_filepath(name): if name.endswith("[output]"): base_dir = get_output_directory() name = name[:-9] elif name.endswith("[input]"): base_dir = get_input_directory() name = name[:-8] elif name.endswith("[temp]"): base_dir = 
get_temp_directory() name = name[:-7] else: return name, None return name, base_dir def get_annotated_filepath(name, default_dir=None): name, base_dir = annotated_filepath(name) if base_dir is None: if default_dir is not None: base_dir = default_dir else: base_dir = get_input_directory() return os.path.join(base_dir, name) def exists_annotated_filepath(name): name, base_dir = annotated_filepath(name) if base_dir is None: base_dir = get_input_directory() filepath = os.path.join(base_dir, name) return os.path.exists(filepath) def add_model_folder_path(folder_name, full_folder_path): global folder_names_and_paths if folder_name in folder_names_and_paths: folder_names_and_paths[folder_name][0].append(full_folder_path) else: folder_names_and_paths[folder_name] = ([full_folder_path], set()) def get_folder_paths(folder_name): return folder_names_and_paths[folder_name][0][:] def recursive_search(directory, excluded_dir_names=None): if not os.path.isdir(directory): return [], {} if excluded_dir_names is None: excluded_dir_names = [] result = [] dirs = {} try: dirs[directory] = os.path.getmtime(directory) except FileNotFoundError: logging.warning(f"Warning: Unable to access {directory}. Skipping this path.") logging.debug("recursive file list on directory {}".format(directory)) for dirpath, subdirs, filenames in os.walk(directory, followlinks=True, topdown=True): subdirs[:] = [d for d in subdirs if d not in excluded_dir_names] for file_name in filenames: relative_path = os.path.relpath(os.path.join(dirpath, file_name), directory) result.append(relative_path) for d in subdirs: path = os.path.join(dirpath, d) try: dirs[path] = os.path.getmtime(path) except FileNotFoundError: logging.warning(f"Warning: Unable to access {path}. Skipping this path.") continue logging.debug("found {} files".format(len(result))) return result, dirs def filter_files_extensions(files, extensions): return sorted(list(filter(lambda a: os.path.splitext(a)[-1].lower() in extensions or len(extensions) == 0, files))) def get_full_path(folder_name, filename): global folder_names_and_paths if folder_name not in folder_names_and_paths: return None folders = folder_names_and_paths[folder_name] filename = os.path.relpath(os.path.join("/", filename), "/") for x in folders[0]: full_path = os.path.join(x, filename) if os.path.isfile(full_path): return full_path elif os.path.islink(full_path): logging.warning("WARNING path {} exists but doesn't link anywhere, skipping.".format(full_path)) return None def get_filename_list_(folder_name): global folder_names_and_paths output_list = set() folders = folder_names_and_paths[folder_name] output_folders = {} for x in folders[0]: files, folders_all = recursive_search(x, excluded_dir_names=[".git"]) output_list.update(filter_files_extensions(files, folders[1])) output_folders = {**output_folders, **folders_all} return (sorted(list(output_list)), output_folders, time.perf_counter()) def cached_filename_list_(folder_name): global filename_list_cache global folder_names_and_paths if folder_name not in filename_list_cache: return None out = filename_list_cache[folder_name] for x in out[1]: time_modified = out[1][x] folder = x if os.path.getmtime(folder) != time_modified: return None folders = folder_names_and_paths[folder_name] for x in folders[0]: if os.path.isdir(x): if x not in out[1]: return None return out def get_filename_list(folder_name): out = cached_filename_list_(folder_name) if out is None: out = get_filename_list_(folder_name) global filename_list_cache filename_list_cache[folder_name] = 
out return list(out[0]) def get_save_image_path(filename_prefix, output_dir, image_width=0, image_height=0): def map_filename(filename): prefix_len = len(os.path.basename(filename_prefix)) prefix = filename[:prefix_len + 1] try: digits = int(filename[prefix_len + 1:].split('_')[0]) except: digits = 0 return (digits, prefix) def compute_vars(input, image_width, image_height): input = input.replace("%width%", str(image_width)) input = input.replace("%height%", str(image_height)) return input filename_prefix = compute_vars(filename_prefix, image_width, image_height) subfolder = os.path.dirname(os.path.normpath(filename_prefix)) filename = os.path.basename(os.path.normpath(filename_prefix)) full_output_folder = os.path.join(output_dir, subfolder) if os.path.commonpath((output_dir, os.path.abspath(full_output_folder))) != output_dir: err = "**** ERROR: Saving image outside the output folder is not allowed." + \ "\n full_output_folder: " + os.path.abspath(full_output_folder) + \ "\n output_dir: " + output_dir + \ "\n commonpath: " + os.path.commonpath((output_dir, os.path.abspath(full_output_folder))) logging.error(err) raise Exception(err) try: counter = max(filter(lambda a: os.path.normcase(a[1][:-1]) == os.path.normcase(filename) and a[1][-1] == "_", map(map_filename, os.listdir(full_output_folder))))[0] + 1 except ValueError: counter = 1 except FileNotFoundError: os.makedirs(full_output_folder, exist_ok=True) counter = 1 return full_output_folder, filename, counter, subfolder, filename_prefix
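For reference, a small usage sketch of the folder registry defined above; the directory path is hypothetical, but the functions are the ones in this file:

import folder_paths

# register an extra search location for LoRA files (hypothetical path)
folder_paths.add_model_folder_path("loras", "/mnt/models/loras")

# list every file with a supported extension across all registered "loras" folders
for name in folder_paths.get_filename_list("loras"):
    print(name, "->", folder_paths.get_full_path("loras", name))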
import torch
from PIL import Image
import struct
import numpy as np
from comfy.cli_args import args, LatentPreviewMethod
from comfy.taesd.taesd import TAESD
import comfy.model_management
import folder_paths
import comfy.utils
import logging

MAX_PREVIEW_RESOLUTION = 512


def preview_to_image(latent_image):
    latents_ubyte = (((latent_image + 1.0) / 2.0).clamp(0, 1)
                     .mul(0xFF)
                     ).to(device="cpu", dtype=torch.uint8, non_blocking=comfy.model_management.device_supports_non_blocking(latent_image.device))
    return Image.fromarray(latents_ubyte.numpy())


class LatentPreviewer:
    def decode_latent_to_preview(self, x0):
        pass

    def decode_latent_to_preview_image(self, preview_format, x0):
        preview_image = self.decode_latent_to_preview(x0)
        return ("JPEG", preview_image, MAX_PREVIEW_RESOLUTION)


class TAESDPreviewerImpl(LatentPreviewer):
    def __init__(self, taesd):
        self.taesd = taesd

    def decode_latent_to_preview(self, x0):
        x_sample = self.taesd.decode(x0[:1])[0].movedim(0, 2)
        return preview_to_image(x_sample)


class Latent2RGBPreviewer(LatentPreviewer):
    def __init__(self, latent_rgb_factors):
        self.latent_rgb_factors = torch.tensor(latent_rgb_factors, device="cpu")

    def decode_latent_to_preview(self, x0):
        self.latent_rgb_factors = self.latent_rgb_factors.to(dtype=x0.dtype, device=x0.device)
        latent_image = x0[0].permute(1, 2, 0) @ self.latent_rgb_factors
        return preview_to_image(latent_image)


def get_previewer(device, latent_format):
    previewer = None
    method = args.preview_method
    if method != LatentPreviewMethod.NoPreviews:
        taesd_decoder_path = None
        if latent_format.taesd_decoder_name is not None:
            taesd_decoder_path = next(
                (fn for fn in folder_paths.get_filename_list("vae_approx")
                 if fn.startswith(latent_format.taesd_decoder_name)),
                ""
            )
            taesd_decoder_path = folder_paths.get_full_path("vae_approx", taesd_decoder_path)

        if method == LatentPreviewMethod.Auto:
            method = LatentPreviewMethod.Latent2RGB

        if method == LatentPreviewMethod.TAESD:
            if taesd_decoder_path:
                taesd = TAESD(None, taesd_decoder_path, latent_channels=latent_format.latent_channels).to(device)
                previewer = TAESDPreviewerImpl(taesd)
            else:
                logging.warning("Warning: TAESD previews enabled, but could not find models/vae_approx/{}".format(latent_format.taesd_decoder_name))

        if previewer is None:
            if latent_format.latent_rgb_factors is not None:
                previewer = Latent2RGBPreviewer(latent_format.latent_rgb_factors)
    return previewer


def prepare_callback(model, steps, x0_output_dict=None):
    preview_format = "JPEG"
    if preview_format not in ["JPEG", "PNG"]:
        preview_format = "JPEG"

    previewer = get_previewer(model.load_device, model.model.latent_format)

    pbar = comfy.utils.ProgressBar(steps)

    def callback(step, x0, x, total_steps):
        if x0_output_dict is not None:
            x0_output_dict["x0"] = x0

        preview_bytes = None
        if previewer:
            preview_bytes = previewer.decode_latent_to_preview_image(preview_format, x0)
        pbar.update_absolute(step + 1, total_steps, preview_bytes)
    return callback
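A short usage sketch of Latent2RGBPreviewer, assuming the comfy package is importable; the factor matrix and latent shape here are made up for illustration (real factors live in the latent format definitions):

import torch
from latent_preview import Latent2RGBPreviewer

# hypothetical 4-channel -> RGB factors
factors = [[0.3, 0.2, 0.2], [0.2, 0.3, 0.1], [0.1, 0.2, 0.3], [-0.2, -0.1, -0.3]]
previewer = Latent2RGBPreviewer(factors)

x0 = torch.randn(1, 4, 64, 64)                 # a fake SD1.x-shaped latent batch
img = previewer.decode_latent_to_preview(x0)   # returns a 64x64 PIL.Image
img.save("preview.png")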
import comfy.options
comfy.options.enable_args_parsing()

import os
import importlib.util
import folder_paths
import time


def execute_prestartup_script():
    def execute_script(script_path):
        module_name = os.path.splitext(script_path)[0]
        try:
            spec = importlib.util.spec_from_file_location(module_name, script_path)
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            return True
        except Exception as e:
            print(f"Failed to execute startup-script: {script_path} / {e}")
        return False

    node_paths = folder_paths.get_folder_paths("custom_nodes")
    for custom_node_path in node_paths:
        possible_modules = os.listdir(custom_node_path)
        node_prestartup_times = []

        for possible_module in possible_modules:
            module_path = os.path.join(custom_node_path, possible_module)
            if os.path.isfile(module_path) or module_path.endswith(".disabled") or module_path == "__pycache__":
                continue

            script_path = os.path.join(module_path, "prestartup_script.py")
            if os.path.exists(script_path):
                time_before = time.perf_counter()
                success = execute_script(script_path)
                node_prestartup_times.append((time.perf_counter() - time_before, module_path, success))
    if len(node_prestartup_times) > 0:
        print("\nPrestartup times for custom nodes:")
        for n in sorted(node_prestartup_times):
            if n[2]:
                import_message = ""
            else:
                import_message = " (PRESTARTUP FAILED)"
            print("{:6.1f} seconds{}:".format(n[0], import_message), n[1])
        print()

execute_prestartup_script()


import asyncio
import itertools
import shutil
import threading
import gc

from comfy.cli_args import args
import logging

if os.name == "nt":
    logging.getLogger("xformers").addFilter(lambda record: 'A matching Triton is not available' not in record.getMessage())

if __name__ == "__main__":
    if args.cuda_device is not None:
        os.environ['CUDA_VISIBLE_DEVICES'] = str(args.cuda_device)
        logging.info("Set cuda device to: {}".format(args.cuda_device))

    if args.deterministic:
        if 'CUBLAS_WORKSPACE_CONFIG' not in os.environ:
            os.environ['CUBLAS_WORKSPACE_CONFIG'] = ":4096:8"

    import cuda_malloc

import comfy.utils
import yaml

import execution
import server
from server import BinaryEventTypes
from nodes import init_custom_nodes
import comfy.model_management


def cuda_malloc_warning():
    device = comfy.model_management.get_torch_device()
    device_name = comfy.model_management.get_torch_device_name(device)
    cuda_malloc_warning = False
    if "cudaMallocAsync" in device_name:
        for b in cuda_malloc.blacklist:
            if b in device_name:
                cuda_malloc_warning = True
    if cuda_malloc_warning:
        logging.warning("\nWARNING: this card most likely does not support cuda-malloc, if you get \"CUDA error\" please run ComfyUI with: --disable-cuda-malloc\n")


def prompt_worker(q, server):
    e = execution.PromptExecutor(server)
    last_gc_collect = 0
    need_gc = False
    gc_collect_interval = 10.0

    while True:
        timeout = 1000.0
        if need_gc:
            timeout = max(gc_collect_interval - (current_time - last_gc_collect), 0.0)

        queue_item = q.get(timeout=timeout)
        if queue_item is not None:
            item, item_id = queue_item
            execution_start_time = time.perf_counter()
            prompt_id = item[1]
            server.last_prompt_id = prompt_id

            e.execute(item[2], prompt_id, item[3], item[4])
            need_gc = True
            q.task_done(item_id, e.outputs_ui,
                        status=execution.PromptQueue.ExecutionStatus(
                            status_str='success' if e.success else 'error',
                            completed=e.success,
                            messages=e.status_messages))
            if server.client_id is not None:
                server.send_sync("executing", { "node": None, "prompt_id": prompt_id }, server.client_id)

            current_time = time.perf_counter()
            execution_time = current_time - execution_start_time
            logging.info("Prompt executed in {:.2f} seconds".format(execution_time))

        flags = q.get_flags()
        free_memory = flags.get("free_memory", False)

        if flags.get("unload_models", free_memory):
            comfy.model_management.unload_all_models()
            need_gc = True
            last_gc_collect = 0

        if free_memory:
            e.reset()
            need_gc = True
            last_gc_collect = 0

        if need_gc:
            current_time = time.perf_counter()
            if (current_time - last_gc_collect) > gc_collect_interval:
                comfy.model_management.cleanup_models()
                gc.collect()
                comfy.model_management.soft_empty_cache()
                last_gc_collect = current_time
                need_gc = False


async def run(server, address='', port=8188, verbose=True, call_on_start=None):
    await asyncio.gather(server.start(address, port, verbose, call_on_start), server.publish_loop())


def hijack_progress(server):
    def hook(value, total, preview_image):
        comfy.model_management.throw_exception_if_processing_interrupted()
        progress = {"value": value, "max": total, "prompt_id": server.last_prompt_id, "node": server.last_node_id}
        server.send_sync("progress", progress, server.client_id)
        if preview_image is not None:
            server.send_sync(BinaryEventTypes.UNENCODED_PREVIEW_IMAGE, preview_image, server.client_id)
    comfy.utils.set_progress_bar_global_hook(hook)


def cleanup_temp():
    temp_dir = folder_paths.get_temp_directory()
    if os.path.exists(temp_dir):
        shutil.rmtree(temp_dir, ignore_errors=True)


def load_extra_path_config(yaml_path):
    with open(yaml_path, 'r') as stream:
        config = yaml.safe_load(stream)
    for c in config:
        conf = config[c]
        if conf is None:
            continue
        base_path = None
        if "base_path" in conf:
            base_path = conf.pop("base_path")
        for x in conf:
            for y in conf[x].split("\n"):
                if len(y) == 0:
                    continue
                full_path = y
                if base_path is not None:
                    full_path = os.path.join(base_path, full_path)
                logging.info("Adding extra search path {} {}".format(x, full_path))
                folder_paths.add_model_folder_path(x, full_path)


if __name__ == "__main__":
    if args.temp_directory:
        temp_dir = os.path.join(os.path.abspath(args.temp_directory), "temp")
        logging.info(f"Setting temp directory to: {temp_dir}")
        folder_paths.set_temp_directory(temp_dir)
    cleanup_temp()

    if args.windows_standalone_build:
        try:
            import new_updater
            new_updater.update_windows_updater()
        except:
            pass

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    server = server.PromptServer(loop)
    q = execution.PromptQueue(server)

    extra_model_paths_config_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "extra_model_paths.yaml")
    if os.path.isfile(extra_model_paths_config_path):
        load_extra_path_config(extra_model_paths_config_path)

    if args.extra_model_paths_config:
        for config_path in itertools.chain(*args.extra_model_paths_config):
            load_extra_path_config(config_path)

    init_custom_nodes()

    cuda_malloc_warning()

    server.add_routes()
    hijack_progress(server)

    threading.Thread(target=prompt_worker, daemon=True, args=(q, server,)).start()

    if args.output_directory:
        output_dir = os.path.abspath(args.output_directory)
        logging.info(f"Setting output directory to: {output_dir}")
        folder_paths.set_output_directory(output_dir)

    folder_paths.add_model_folder_path("checkpoints", os.path.join(folder_paths.get_output_directory(), "checkpoints"))
    folder_paths.add_model_folder_path("clip", os.path.join(folder_paths.get_output_directory(), "clip"))
    folder_paths.add_model_folder_path("vae", os.path.join(folder_paths.get_output_directory(), "vae"))

    if args.input_directory:
        input_dir = os.path.abspath(args.input_directory)
        logging.info(f"Setting input directory to: {input_dir}")
        folder_paths.set_input_directory(input_dir)

    if args.quick_test_for_ci:
        exit(0)

    call_on_start = None
    if args.auto_launch:
        def startup_server(scheme, address, port):
            import webbrowser
            if os.name == 'nt' and address == '0.0.0.0':
                address = '127.0.0.1'
            webbrowser.open(f"{scheme}://{address}:{port}")
        call_on_start = startup_server

    try:
        loop.run_until_complete(run(server, address=args.listen, port=args.port, verbose=not args.dont_print_server, call_on_start=call_on_start))
    except KeyboardInterrupt:
        logging.info("\nStopped server")

    cleanup_temp()
import os
import shutil

base_path = os.path.dirname(os.path.realpath(__file__))


def update_windows_updater():
    top_path = os.path.dirname(base_path)
    updater_path = os.path.join(base_path, ".ci/update_windows/update.py")
    bat_path = os.path.join(base_path, ".ci/update_windows/update_comfyui.bat")

    dest_updater_path = os.path.join(top_path, "update/update.py")
    dest_bat_path = os.path.join(top_path, "update/update_comfyui.bat")
    dest_bat_deps_path = os.path.join(top_path, "update/update_comfyui_and_python_dependencies.bat")

    try:
        with open(dest_bat_path, 'rb') as f:
            contents = f.read()
    except:
        return

    if not contents.startswith(b"..\\python_embeded\\python.exe .\\update.py"):
        return

    shutil.copy(updater_path, dest_updater_path)
    try:
        with open(dest_bat_deps_path, 'rb') as f:
            contents = f.read()
        contents = contents.replace(b'..\\python_embeded\\python.exe .\\update.py ..\\ComfyUI\\', b'call update_comfyui.bat nopause')
        with open(dest_bat_deps_path, 'wb') as f:
            f.write(contents)
    except:
        pass
    shutil.copy(bat_path, dest_bat_path)
    print("Updated the windows standalone package updater.")
import torch import os import sys import json import hashlib import traceback import math import time import random import logging from PIL import Image, ImageOps, ImageSequence, ImageFile from PIL.PngImagePlugin import PngInfo import numpy as np import safetensors.torch sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), "comfy")) import comfy.diffusers_load import comfy.samplers import comfy.sample import comfy.sd import comfy.utils import comfy.controlnet import comfy.clip_vision import comfy.model_management from comfy.cli_args import args import importlib import folder_paths import latent_preview import node_helpers def before_node_execution(): comfy.model_management.throw_exception_if_processing_interrupted() def interrupt_processing(value=True): comfy.model_management.interrupt_current_processing(value) MAX_RESOLUTION=16384 class CLIPTextEncode: @classmethod def INPUT_TYPES(s): return {"required": {"text": ("STRING", {"multiline": True, "dynamicPrompts": True}), "clip": ("CLIP", )}} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "encode" CATEGORY = "conditioning" def encode(self, clip, text): tokens = clip.tokenize(text) cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) return ([[cond, {"pooled_output": pooled}]], ) class ConditioningCombine: @classmethod def INPUT_TYPES(s): return {"required": {"conditioning_1": ("CONDITIONING", ), "conditioning_2": ("CONDITIONING", )}} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "combine" CATEGORY = "conditioning" def combine(self, conditioning_1, conditioning_2): return (conditioning_1 + conditioning_2, ) class ConditioningAverage : @classmethod def INPUT_TYPES(s): return {"required": {"conditioning_to": ("CONDITIONING", ), "conditioning_from": ("CONDITIONING", ), "conditioning_to_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "addWeighted" CATEGORY = "conditioning" def addWeighted(self, conditioning_to, conditioning_from, conditioning_to_strength): out = [] if len(conditioning_from) > 1: logging.warning("Warning: ConditioningAverage conditioning_from contains more than 1 cond, only the first one will actually be applied to conditioning_to.") cond_from = conditioning_from[0][0] pooled_output_from = conditioning_from[0][1].get("pooled_output", None) for i in range(len(conditioning_to)): t1 = conditioning_to[i][0] pooled_output_to = conditioning_to[i][1].get("pooled_output", pooled_output_from) t0 = cond_from[:,:t1.shape[1]] if t0.shape[1] < t1.shape[1]: t0 = torch.cat([t0] + [torch.zeros((1, (t1.shape[1] - t0.shape[1]), t1.shape[2]))], dim=1) tw = torch.mul(t1, conditioning_to_strength) + torch.mul(t0, (1.0 - conditioning_to_strength)) t_to = conditioning_to[i][1].copy() if pooled_output_from is not None and pooled_output_to is not None: t_to["pooled_output"] = torch.mul(pooled_output_to, conditioning_to_strength) + torch.mul(pooled_output_from, (1.0 - conditioning_to_strength)) elif pooled_output_from is not None: t_to["pooled_output"] = pooled_output_from n = [tw, t_to] out.append(n) return (out, ) class ConditioningConcat: @classmethod def INPUT_TYPES(s): return {"required": { "conditioning_to": ("CONDITIONING",), "conditioning_from": ("CONDITIONING",), }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "concat" CATEGORY = "conditioning" def concat(self, conditioning_to, conditioning_from): out = [] if len(conditioning_from) > 1: logging.warning("Warning: ConditioningConcat conditioning_from contains more than 1 cond, only the first 
one will actually be applied to conditioning_to.") cond_from = conditioning_from[0][0] for i in range(len(conditioning_to)): t1 = conditioning_to[i][0] tw = torch.cat((t1, cond_from),1) n = [tw, conditioning_to[i][1].copy()] out.append(n) return (out, ) class ConditioningSetArea: @classmethod def INPUT_TYPES(s): return {"required": {"conditioning": ("CONDITIONING", ), "width": ("INT", {"default": 64, "min": 64, "max": MAX_RESOLUTION, "step": 8}), "height": ("INT", {"default": 64, "min": 64, "max": MAX_RESOLUTION, "step": 8}), "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "append" CATEGORY = "conditioning" def append(self, conditioning, width, height, x, y, strength): c = node_helpers.conditioning_set_values(conditioning, {"area": (height "strength": strength, "set_area_to_bounds": False}) return (c, ) class ConditioningSetAreaPercentage: @classmethod def INPUT_TYPES(s): return {"required": {"conditioning": ("CONDITIONING", ), "width": ("FLOAT", {"default": 1.0, "min": 0, "max": 1.0, "step": 0.01}), "height": ("FLOAT", {"default": 1.0, "min": 0, "max": 1.0, "step": 0.01}), "x": ("FLOAT", {"default": 0, "min": 0, "max": 1.0, "step": 0.01}), "y": ("FLOAT", {"default": 0, "min": 0, "max": 1.0, "step": 0.01}), "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "append" CATEGORY = "conditioning" def append(self, conditioning, width, height, x, y, strength): c = node_helpers.conditioning_set_values(conditioning, {"area": ("percentage", height, width, y, x), "strength": strength, "set_area_to_bounds": False}) return (c, ) class ConditioningSetAreaStrength: @classmethod def INPUT_TYPES(s): return {"required": {"conditioning": ("CONDITIONING", ), "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "append" CATEGORY = "conditioning" def append(self, conditioning, strength): c = node_helpers.conditioning_set_values(conditioning, {"strength": strength}) return (c, ) class ConditioningSetMask: @classmethod def INPUT_TYPES(s): return {"required": {"conditioning": ("CONDITIONING", ), "mask": ("MASK", ), "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "set_cond_area": (["default", "mask bounds"],), }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "append" CATEGORY = "conditioning" def append(self, conditioning, mask, set_cond_area, strength): set_area_to_bounds = False if set_cond_area != "default": set_area_to_bounds = True if len(mask.shape) < 3: mask = mask.unsqueeze(0) c = node_helpers.conditioning_set_values(conditioning, {"mask": mask, "set_area_to_bounds": set_area_to_bounds, "mask_strength": strength}) return (c, ) class ConditioningZeroOut: @classmethod def INPUT_TYPES(s): return {"required": {"conditioning": ("CONDITIONING", )}} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "zero_out" CATEGORY = "advanced/conditioning" def zero_out(self, conditioning): c = [] for t in conditioning: d = t[1].copy() if "pooled_output" in d: d["pooled_output"] = torch.zeros_like(d["pooled_output"]) n = [torch.zeros_like(t[0]), d] c.append(n) return (c, ) class ConditioningSetTimestepRange: @classmethod def INPUT_TYPES(s): return {"required": {"conditioning": ("CONDITIONING", ), "start": ("FLOAT", {"default": 0.0, 
"min": 0.0, "max": 1.0, "step": 0.001}), "end": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}) }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "set_range" CATEGORY = "advanced/conditioning" def set_range(self, conditioning, start, end): c = node_helpers.conditioning_set_values(conditioning, {"start_percent": start, "end_percent": end}) return (c, ) class VAEDecode: @classmethod def INPUT_TYPES(s): return {"required": { "samples": ("LATENT", ), "vae": ("VAE", )}} RETURN_TYPES = ("IMAGE",) FUNCTION = "decode" CATEGORY = "latent" def decode(self, vae, samples): return (vae.decode(samples["samples"]), ) class VAEDecodeTiled: @classmethod def INPUT_TYPES(s): return {"required": {"samples": ("LATENT", ), "vae": ("VAE", ), "tile_size": ("INT", {"default": 512, "min": 320, "max": 4096, "step": 64}) }} RETURN_TYPES = ("IMAGE",) FUNCTION = "decode" CATEGORY = "_for_testing" def decode(self, vae, samples, tile_size): return (vae.decode_tiled(samples["samples"], tile_x=tile_size class VAEEncode: @classmethod def INPUT_TYPES(s): return {"required": { "pixels": ("IMAGE", ), "vae": ("VAE", )}} RETURN_TYPES = ("LATENT",) FUNCTION = "encode" CATEGORY = "latent" def encode(self, vae, pixels): t = vae.encode(pixels[:,:,:,:3]) return ({"samples":t}, ) class VAEEncodeTiled: @classmethod def INPUT_TYPES(s): return {"required": {"pixels": ("IMAGE", ), "vae": ("VAE", ), "tile_size": ("INT", {"default": 512, "min": 320, "max": 4096, "step": 64}) }} RETURN_TYPES = ("LATENT",) FUNCTION = "encode" CATEGORY = "_for_testing" def encode(self, vae, pixels, tile_size): t = vae.encode_tiled(pixels[:,:,:,:3], tile_x=tile_size, tile_y=tile_size, ) return ({"samples":t}, ) class VAEEncodeForInpaint: @classmethod def INPUT_TYPES(s): return {"required": { "pixels": ("IMAGE", ), "vae": ("VAE", ), "mask": ("MASK", ), "grow_mask_by": ("INT", {"default": 6, "min": 0, "max": 64, "step": 1}),}} RETURN_TYPES = ("LATENT",) FUNCTION = "encode" CATEGORY = "latent/inpaint" def encode(self, vae, pixels, mask, grow_mask_by=6): x = (pixels.shape[1] y = (pixels.shape[2] mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(pixels.shape[1], pixels.shape[2]), mode="bilinear") pixels = pixels.clone() if pixels.shape[1] != x or pixels.shape[2] != y: x_offset = (pixels.shape[1] % vae.downscale_ratio) y_offset = (pixels.shape[2] % vae.downscale_ratio) pixels = pixels[:,x_offset:x + x_offset, y_offset:y + y_offset,:] mask = mask[:,:,x_offset:x + x_offset, y_offset:y + y_offset] if grow_mask_by == 0: mask_erosion = mask else: kernel_tensor = torch.ones((1, 1, grow_mask_by, grow_mask_by)) padding = math.ceil((grow_mask_by - 1) / 2) mask_erosion = torch.clamp(torch.nn.functional.conv2d(mask.round(), kernel_tensor, padding=padding), 0, 1) m = (1.0 - mask.round()).squeeze(1) for i in range(3): pixels[:,:,:,i] -= 0.5 pixels[:,:,:,i] *= m pixels[:,:,:,i] += 0.5 t = vae.encode(pixels) return ({"samples":t, "noise_mask": (mask_erosion[:,:,:x,:y].round())}, ) class InpaintModelConditioning: @classmethod def INPUT_TYPES(s): return {"required": {"positive": ("CONDITIONING", ), "negative": ("CONDITIONING", ), "vae": ("VAE", ), "pixels": ("IMAGE", ), "mask": ("MASK", ), }} RETURN_TYPES = ("CONDITIONING","CONDITIONING","LATENT") RETURN_NAMES = ("positive", "negative", "latent") FUNCTION = "encode" CATEGORY = "conditioning/inpaint" def encode(self, positive, negative, pixels, vae, mask): x = (pixels.shape[1] y = (pixels.shape[2] mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, 
mask.shape[-2], mask.shape[-1])), size=(pixels.shape[1], pixels.shape[2]), mode="bilinear") orig_pixels = pixels pixels = orig_pixels.clone() if pixels.shape[1] != x or pixels.shape[2] != y: x_offset = (pixels.shape[1] % 8) y_offset = (pixels.shape[2] % 8) pixels = pixels[:,x_offset:x + x_offset, y_offset:y + y_offset,:] mask = mask[:,:,x_offset:x + x_offset, y_offset:y + y_offset] m = (1.0 - mask.round()).squeeze(1) for i in range(3): pixels[:,:,:,i] -= 0.5 pixels[:,:,:,i] *= m pixels[:,:,:,i] += 0.5 concat_latent = vae.encode(pixels) orig_latent = vae.encode(orig_pixels) out_latent = {} out_latent["samples"] = orig_latent out_latent["noise_mask"] = mask out = [] for conditioning in [positive, negative]: c = node_helpers.conditioning_set_values(conditioning, {"concat_latent_image": concat_latent, "concat_mask": mask}) out.append(c) return (out[0], out[1], out_latent) class SaveLatent: def __init__(self): self.output_dir = folder_paths.get_output_directory() @classmethod def INPUT_TYPES(s): return {"required": { "samples": ("LATENT", ), "filename_prefix": ("STRING", {"default": "latents/ComfyUI"})}, "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, } RETURN_TYPES = () FUNCTION = "save" OUTPUT_NODE = True CATEGORY = "_for_testing" def save(self, samples, filename_prefix="ComfyUI", prompt=None, extra_pnginfo=None): full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir) prompt_info = "" if prompt is not None: prompt_info = json.dumps(prompt) metadata = None if not args.disable_metadata: metadata = {"prompt": prompt_info} if extra_pnginfo is not None: for x in extra_pnginfo: metadata[x] = json.dumps(extra_pnginfo[x]) file = f"{filename}_{counter:05}_.latent" results = list() results.append({ "filename": file, "subfolder": subfolder, "type": "output" }) file = os.path.join(full_output_folder, file) output = {} output["latent_tensor"] = samples["samples"] output["latent_format_version_0"] = torch.tensor([]) comfy.utils.save_torch_file(output, file, metadata=metadata) return { "ui": { "latents": results } } class LoadLatent: @classmethod def INPUT_TYPES(s): input_dir = folder_paths.get_input_directory() files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f)) and f.endswith(".latent")] return {"required": {"latent": [sorted(files), ]}, } CATEGORY = "_for_testing" RETURN_TYPES = ("LATENT", ) FUNCTION = "load" def load(self, latent): latent_path = folder_paths.get_annotated_filepath(latent) latent = safetensors.torch.load_file(latent_path, device="cpu") multiplier = 1.0 if "latent_format_version_0" not in latent: multiplier = 1.0 / 0.18215 samples = {"samples": latent["latent_tensor"].float() * multiplier} return (samples, ) @classmethod def IS_CHANGED(s, latent): image_path = folder_paths.get_annotated_filepath(latent) m = hashlib.sha256() with open(image_path, 'rb') as f: m.update(f.read()) return m.digest().hex() @classmethod def VALIDATE_INPUTS(s, latent): if not folder_paths.exists_annotated_filepath(latent): return "Invalid latent file: {}".format(latent) return True class CheckpointLoader: @classmethod def INPUT_TYPES(s): return {"required": { "config_name": (folder_paths.get_filename_list("configs"), ), "ckpt_name": (folder_paths.get_filename_list("checkpoints"), )}} RETURN_TYPES = ("MODEL", "CLIP", "VAE") FUNCTION = "load_checkpoint" CATEGORY = "advanced/loaders" def load_checkpoint(self, config_name, ckpt_name): config_path = folder_paths.get_full_path("configs", 
config_name) ckpt_path = folder_paths.get_full_path("checkpoints", ckpt_name) return comfy.sd.load_checkpoint(config_path, ckpt_path, output_vae=True, output_clip=True, embedding_directory=folder_paths.get_folder_paths("embeddings")) class CheckpointLoaderSimple: @classmethod def INPUT_TYPES(s): return {"required": { "ckpt_name": (folder_paths.get_filename_list("checkpoints"), ), }} RETURN_TYPES = ("MODEL", "CLIP", "VAE") FUNCTION = "load_checkpoint" CATEGORY = "loaders" def load_checkpoint(self, ckpt_name): ckpt_path = folder_paths.get_full_path("checkpoints", ckpt_name) out = comfy.sd.load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, embedding_directory=folder_paths.get_folder_paths("embeddings")) return out[:3] class DiffusersLoader: @classmethod def INPUT_TYPES(cls): paths = [] for search_path in folder_paths.get_folder_paths("diffusers"): if os.path.exists(search_path): for root, subdir, files in os.walk(search_path, followlinks=True): if "model_index.json" in files: paths.append(os.path.relpath(root, start=search_path)) return {"required": {"model_path": (paths,), }} RETURN_TYPES = ("MODEL", "CLIP", "VAE") FUNCTION = "load_checkpoint" CATEGORY = "advanced/loaders/deprecated" def load_checkpoint(self, model_path, output_vae=True, output_clip=True): for search_path in folder_paths.get_folder_paths("diffusers"): if os.path.exists(search_path): path = os.path.join(search_path, model_path) if os.path.exists(path): model_path = path break return comfy.diffusers_load.load_diffusers(model_path, output_vae=output_vae, output_clip=output_clip, embedding_directory=folder_paths.get_folder_paths("embeddings")) class unCLIPCheckpointLoader: @classmethod def INPUT_TYPES(s): return {"required": { "ckpt_name": (folder_paths.get_filename_list("checkpoints"), ), }} RETURN_TYPES = ("MODEL", "CLIP", "VAE", "CLIP_VISION") FUNCTION = "load_checkpoint" CATEGORY = "loaders" def load_checkpoint(self, ckpt_name, output_vae=True, output_clip=True): ckpt_path = folder_paths.get_full_path("checkpoints", ckpt_name) out = comfy.sd.load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, output_clipvision=True, embedding_directory=folder_paths.get_folder_paths("embeddings")) return out class CLIPSetLastLayer: @classmethod def INPUT_TYPES(s): return {"required": { "clip": ("CLIP", ), "stop_at_clip_layer": ("INT", {"default": -1, "min": -24, "max": -1, "step": 1}), }} RETURN_TYPES = ("CLIP",) FUNCTION = "set_last_layer" CATEGORY = "conditioning" def set_last_layer(self, clip, stop_at_clip_layer): clip = clip.clone() clip.clip_layer(stop_at_clip_layer) return (clip,) class LoraLoader: def __init__(self): self.loaded_lora = None @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "clip": ("CLIP", ), "lora_name": (folder_paths.get_filename_list("loras"), ), "strength_model": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01}), "strength_clip": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01}), }} RETURN_TYPES = ("MODEL", "CLIP") FUNCTION = "load_lora" CATEGORY = "loaders" def load_lora(self, model, clip, lora_name, strength_model, strength_clip): if strength_model == 0 and strength_clip == 0: return (model, clip) lora_path = folder_paths.get_full_path("loras", lora_name) lora = None if self.loaded_lora is not None: if self.loaded_lora[0] == lora_path: lora = self.loaded_lora[1] else: temp = self.loaded_lora self.loaded_lora = None del temp if lora is None: lora = comfy.utils.load_torch_file(lora_path, 
safe_load=True) self.loaded_lora = (lora_path, lora) model_lora, clip_lora = comfy.sd.load_lora_for_models(model, clip, lora, strength_model, strength_clip) return (model_lora, clip_lora) class LoraLoaderModelOnly(LoraLoader): @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "lora_name": (folder_paths.get_filename_list("loras"), ), "strength_model": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "load_lora_model_only" def load_lora_model_only(self, model, lora_name, strength_model): return (self.load_lora(model, None, lora_name, strength_model, 0)[0],) class VAELoader: @staticmethod def vae_list(): vaes = folder_paths.get_filename_list("vae") approx_vaes = folder_paths.get_filename_list("vae_approx") sdxl_taesd_enc = False sdxl_taesd_dec = False sd1_taesd_enc = False sd1_taesd_dec = False sd3_taesd_enc = False sd3_taesd_dec = False for v in approx_vaes: if v.startswith("taesd_decoder."): sd1_taesd_dec = True elif v.startswith("taesd_encoder."): sd1_taesd_enc = True elif v.startswith("taesdxl_decoder."): sdxl_taesd_dec = True elif v.startswith("taesdxl_encoder."): sdxl_taesd_enc = True elif v.startswith("taesd3_decoder."): sd3_taesd_dec = True elif v.startswith("taesd3_encoder."): sd3_taesd_enc = True if sd1_taesd_dec and sd1_taesd_enc: vaes.append("taesd") if sdxl_taesd_dec and sdxl_taesd_enc: vaes.append("taesdxl") if sd3_taesd_dec and sd3_taesd_enc: vaes.append("taesd3") return vaes @staticmethod def load_taesd(name): sd = {} approx_vaes = folder_paths.get_filename_list("vae_approx") encoder = next(filter(lambda a: a.startswith("{}_encoder.".format(name)), approx_vaes)) decoder = next(filter(lambda a: a.startswith("{}_decoder.".format(name)), approx_vaes)) enc = comfy.utils.load_torch_file(folder_paths.get_full_path("vae_approx", encoder)) for k in enc: sd["taesd_encoder.{}".format(k)] = enc[k] dec = comfy.utils.load_torch_file(folder_paths.get_full_path("vae_approx", decoder)) for k in dec: sd["taesd_decoder.{}".format(k)] = dec[k] if name == "taesd": sd["vae_scale"] = torch.tensor(0.18215) sd["vae_shift"] = torch.tensor(0.0) elif name == "taesdxl": sd["vae_scale"] = torch.tensor(0.13025) sd["vae_shift"] = torch.tensor(0.0) elif name == "taesd3": sd["vae_scale"] = torch.tensor(1.5305) sd["vae_shift"] = torch.tensor(0.0609) return sd @classmethod def INPUT_TYPES(s): return {"required": { "vae_name": (s.vae_list(), )}} RETURN_TYPES = ("VAE",) FUNCTION = "load_vae" CATEGORY = "loaders" def load_vae(self, vae_name): if vae_name in ["taesd", "taesdxl", "taesd3"]: sd = self.load_taesd(vae_name) else: vae_path = folder_paths.get_full_path("vae", vae_name) sd = comfy.utils.load_torch_file(vae_path) vae = comfy.sd.VAE(sd=sd) return (vae,) class ControlNetLoader: @classmethod def INPUT_TYPES(s): return {"required": { "control_net_name": (folder_paths.get_filename_list("controlnet"), )}} RETURN_TYPES = ("CONTROL_NET",) FUNCTION = "load_controlnet" CATEGORY = "loaders" def load_controlnet(self, control_net_name): controlnet_path = folder_paths.get_full_path("controlnet", control_net_name) controlnet = comfy.controlnet.load_controlnet(controlnet_path) return (controlnet,) class DiffControlNetLoader: @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "control_net_name": (folder_paths.get_filename_list("controlnet"), )}} RETURN_TYPES = ("CONTROL_NET",) FUNCTION = "load_controlnet" CATEGORY = "loaders" def load_controlnet(self, model, control_net_name): controlnet_path = 
folder_paths.get_full_path("controlnet", control_net_name) controlnet = comfy.controlnet.load_controlnet(controlnet_path, model) return (controlnet,) class ControlNetApply: @classmethod def INPUT_TYPES(s): return {"required": {"conditioning": ("CONDITIONING", ), "control_net": ("CONTROL_NET", ), "image": ("IMAGE", ), "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}) }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "apply_controlnet" CATEGORY = "conditioning" def apply_controlnet(self, conditioning, control_net, image, strength): if strength == 0: return (conditioning, ) c = [] control_hint = image.movedim(-1,1) for t in conditioning: n = [t[0], t[1].copy()] c_net = control_net.copy().set_cond_hint(control_hint, strength) if 'control' in t[1]: c_net.set_previous_controlnet(t[1]['control']) n[1]['control'] = c_net n[1]['control_apply_to_uncond'] = True c.append(n) return (c, ) class ControlNetApplyAdvanced: @classmethod def INPUT_TYPES(s): return {"required": {"positive": ("CONDITIONING", ), "negative": ("CONDITIONING", ), "control_net": ("CONTROL_NET", ), "image": ("IMAGE", ), "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), "end_percent": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}) }} RETURN_TYPES = ("CONDITIONING","CONDITIONING") RETURN_NAMES = ("positive", "negative") FUNCTION = "apply_controlnet" CATEGORY = "conditioning" def apply_controlnet(self, positive, negative, control_net, image, strength, start_percent, end_percent): if strength == 0: return (positive, negative) control_hint = image.movedim(-1,1) cnets = {} out = [] for conditioning in [positive, negative]: c = [] for t in conditioning: d = t[1].copy() prev_cnet = d.get('control', None) if prev_cnet in cnets: c_net = cnets[prev_cnet] else: c_net = control_net.copy().set_cond_hint(control_hint, strength, (start_percent, end_percent)) c_net.set_previous_controlnet(prev_cnet) cnets[prev_cnet] = c_net d['control'] = c_net d['control_apply_to_uncond'] = False n = [t[0], d] c.append(n) out.append(c) return (out[0], out[1]) class UNETLoader: @classmethod def INPUT_TYPES(s): return {"required": { "unet_name": (folder_paths.get_filename_list("unet"), ), }} RETURN_TYPES = ("MODEL",) FUNCTION = "load_unet" CATEGORY = "advanced/loaders" def load_unet(self, unet_name): unet_path = folder_paths.get_full_path("unet", unet_name) model = comfy.sd.load_unet(unet_path) return (model,) class CLIPLoader: @classmethod def INPUT_TYPES(s): return {"required": { "clip_name": (folder_paths.get_filename_list("clip"), ), "type": (["stable_diffusion", "stable_cascade", "sd3", "stable_audio"], ), }} RETURN_TYPES = ("CLIP",) FUNCTION = "load_clip" CATEGORY = "advanced/loaders" def load_clip(self, clip_name, type="stable_diffusion"): if type == "stable_cascade": clip_type = comfy.sd.CLIPType.STABLE_CASCADE elif type == "sd3": clip_type = comfy.sd.CLIPType.SD3 elif type == "stable_audio": clip_type = comfy.sd.CLIPType.STABLE_AUDIO else: clip_type = comfy.sd.CLIPType.STABLE_DIFFUSION clip_path = folder_paths.get_full_path("clip", clip_name) clip = comfy.sd.load_clip(ckpt_paths=[clip_path], embedding_directory=folder_paths.get_folder_paths("embeddings"), clip_type=clip_type) return (clip,) class DualCLIPLoader: @classmethod def INPUT_TYPES(s): return {"required": { "clip_name1": (folder_paths.get_filename_list("clip"), ), "clip_name2": (folder_paths.get_filename_list("clip"), ), "type": (["sdxl", 
"sd3"], ), }} RETURN_TYPES = ("CLIP",) FUNCTION = "load_clip" CATEGORY = "advanced/loaders" def load_clip(self, clip_name1, clip_name2, type): clip_path1 = folder_paths.get_full_path("clip", clip_name1) clip_path2 = folder_paths.get_full_path("clip", clip_name2) if type == "sdxl": clip_type = comfy.sd.CLIPType.STABLE_DIFFUSION elif type == "sd3": clip_type = comfy.sd.CLIPType.SD3 clip = comfy.sd.load_clip(ckpt_paths=[clip_path1, clip_path2], embedding_directory=folder_paths.get_folder_paths("embeddings"), clip_type=clip_type) return (clip,) class CLIPVisionLoader: @classmethod def INPUT_TYPES(s): return {"required": { "clip_name": (folder_paths.get_filename_list("clip_vision"), ), }} RETURN_TYPES = ("CLIP_VISION",) FUNCTION = "load_clip" CATEGORY = "loaders" def load_clip(self, clip_name): clip_path = folder_paths.get_full_path("clip_vision", clip_name) clip_vision = comfy.clip_vision.load(clip_path) return (clip_vision,) class CLIPVisionEncode: @classmethod def INPUT_TYPES(s): return {"required": { "clip_vision": ("CLIP_VISION",), "image": ("IMAGE",) }} RETURN_TYPES = ("CLIP_VISION_OUTPUT",) FUNCTION = "encode" CATEGORY = "conditioning" def encode(self, clip_vision, image): output = clip_vision.encode_image(image) return (output,) class StyleModelLoader: @classmethod def INPUT_TYPES(s): return {"required": { "style_model_name": (folder_paths.get_filename_list("style_models"), )}} RETURN_TYPES = ("STYLE_MODEL",) FUNCTION = "load_style_model" CATEGORY = "loaders" def load_style_model(self, style_model_name): style_model_path = folder_paths.get_full_path("style_models", style_model_name) style_model = comfy.sd.load_style_model(style_model_path) return (style_model,) class StyleModelApply: @classmethod def INPUT_TYPES(s): return {"required": {"conditioning": ("CONDITIONING", ), "style_model": ("STYLE_MODEL", ), "clip_vision_output": ("CLIP_VISION_OUTPUT", ), }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "apply_stylemodel" CATEGORY = "conditioning/style_model" def apply_stylemodel(self, clip_vision_output, style_model, conditioning): cond = style_model.get_cond(clip_vision_output).flatten(start_dim=0, end_dim=1).unsqueeze(dim=0) c = [] for t in conditioning: n = [torch.cat((t[0], cond), dim=1), t[1].copy()] c.append(n) return (c, ) class unCLIPConditioning: @classmethod def INPUT_TYPES(s): return {"required": {"conditioning": ("CONDITIONING", ), "clip_vision_output": ("CLIP_VISION_OUTPUT", ), "strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), "noise_augmentation": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.01}), }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "apply_adm" CATEGORY = "conditioning" def apply_adm(self, conditioning, clip_vision_output, strength, noise_augmentation): if strength == 0: return (conditioning, ) c = [] for t in conditioning: o = t[1].copy() x = {"clip_vision_output": clip_vision_output, "strength": strength, "noise_augmentation": noise_augmentation} if "unclip_conditioning" in o: o["unclip_conditioning"] = o["unclip_conditioning"][:] + [x] else: o["unclip_conditioning"] = [x] n = [t[0], o] c.append(n) return (c, ) class GLIGENLoader: @classmethod def INPUT_TYPES(s): return {"required": { "gligen_name": (folder_paths.get_filename_list("gligen"), )}} RETURN_TYPES = ("GLIGEN",) FUNCTION = "load_gligen" CATEGORY = "loaders" def load_gligen(self, gligen_name): gligen_path = folder_paths.get_full_path("gligen", gligen_name) gligen = comfy.sd.load_gligen(gligen_path) return (gligen,) class GLIGENTextBoxApply: @classmethod def 
INPUT_TYPES(s): return {"required": {"conditioning_to": ("CONDITIONING", ), "clip": ("CLIP", ), "gligen_textbox_model": ("GLIGEN", ), "text": ("STRING", {"multiline": True, "dynamicPrompts": True}), "width": ("INT", {"default": 64, "min": 8, "max": MAX_RESOLUTION, "step": 8}), "height": ("INT", {"default": 64, "min": 8, "max": MAX_RESOLUTION, "step": 8}), "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "append" CATEGORY = "conditioning/gligen" def append(self, conditioning_to, clip, gligen_textbox_model, text, width, height, x, y): c = [] cond, cond_pooled = clip.encode_from_tokens(clip.tokenize(text), return_pooled="unprojected") for t in conditioning_to: n = [t[0], t[1].copy()] position_params = [(cond_pooled, height // 8, width // 8, y // 8, x // 8)] prev = [] if "gligen" in n[1]: prev = n[1]['gligen'][2] n[1]['gligen'] = ("position", gligen_textbox_model, prev + position_params) c.append(n) return (c, ) class EmptyLatentImage: def __init__(self): self.device = comfy.model_management.intermediate_device() @classmethod def INPUT_TYPES(s): return {"required": { "width": ("INT", {"default": 512, "min": 16, "max": MAX_RESOLUTION, "step": 8}), "height": ("INT", {"default": 512, "min": 16, "max": MAX_RESOLUTION, "step": 8}), "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096})}} RETURN_TYPES = ("LATENT",) FUNCTION = "generate" CATEGORY = "latent" def generate(self, width, height, batch_size=1): latent = torch.zeros([batch_size, 4, height // 8, width // 8], device=self.device) return ({"samples":latent}, ) class LatentFromBatch: @classmethod def INPUT_TYPES(s): return {"required": { "samples": ("LATENT",), "batch_index": ("INT", {"default": 0, "min": 0, "max": 63}), "length": ("INT", {"default": 1, "min": 1, "max": 64}), }} RETURN_TYPES = ("LATENT",) FUNCTION = "frombatch" CATEGORY = "latent/batch" def frombatch(self, samples, batch_index, length): s = samples.copy() s_in = samples["samples"] batch_index = min(s_in.shape[0] - 1, batch_index) length = min(s_in.shape[0] - batch_index, length) s["samples"] = s_in[batch_index:batch_index + length].clone() if "noise_mask" in samples: masks = samples["noise_mask"] if masks.shape[0] == 1: s["noise_mask"] = masks.clone() else: if masks.shape[0] < s_in.shape[0]: masks = masks.repeat(math.ceil(s_in.shape[0] / masks.shape[0]), 1, 1, 1)[:s_in.shape[0]] s["noise_mask"] = masks[batch_index:batch_index + length].clone() if "batch_index" not in s: s["batch_index"] = [x for x in range(batch_index, batch_index+length)] else: s["batch_index"] = samples["batch_index"][batch_index:batch_index + length] return (s,) class RepeatLatentBatch: @classmethod def INPUT_TYPES(s): return {"required": { "samples": ("LATENT",), "amount": ("INT", {"default": 1, "min": 1, "max": 64}), }} RETURN_TYPES = ("LATENT",) FUNCTION = "repeat" CATEGORY = "latent/batch" def repeat(self, samples, amount): s = samples.copy() s_in = samples["samples"] s["samples"] = s_in.repeat((amount, 1,1,1)) if "noise_mask" in samples and samples["noise_mask"].shape[0] > 1: masks = samples["noise_mask"] if masks.shape[0] < s_in.shape[0]: masks = masks.repeat(math.ceil(s_in.shape[0] / masks.shape[0]), 1, 1, 1)[:s_in.shape[0]] s["noise_mask"] = samples["noise_mask"].repeat((amount, 1,1,1)) if "batch_index" in s: offset = max(s["batch_index"]) - min(s["batch_index"]) + 1 s["batch_index"] = s["batch_index"] + [x + (i * offset) for i in range(1, amount) for x in s["batch_index"]] return (s,) class LatentUpscale: upscale_methods = 
["nearest-exact", "bilinear", "area", "bicubic", "bislerp"] crop_methods = ["disabled", "center"] @classmethod def INPUT_TYPES(s): return {"required": { "samples": ("LATENT",), "upscale_method": (s.upscale_methods,), "width": ("INT", {"default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8}), "height": ("INT", {"default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8}), "crop": (s.crop_methods,)}} RETURN_TYPES = ("LATENT",) FUNCTION = "upscale" CATEGORY = "latent" def upscale(self, samples, upscale_method, width, height, crop): if width == 0 and height == 0: s = samples else: s = samples.copy() if width == 0: height = max(64, height) width = max(64, round(samples["samples"].shape[3] * height / samples["samples"].shape[2])) elif height == 0: width = max(64, width) height = max(64, round(samples["samples"].shape[2] * width / samples["samples"].shape[3])) else: width = max(64, width) height = max(64, height) s["samples"] = comfy.utils.common_upscale(samples["samples"], width // 8, height // 8, upscale_method, crop) return (s,) class LatentUpscaleBy: upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "bislerp"] @classmethod def INPUT_TYPES(s): return {"required": { "samples": ("LATENT",), "upscale_method": (s.upscale_methods,), "scale_by": ("FLOAT", {"default": 1.5, "min": 0.01, "max": 8.0, "step": 0.01}),}} RETURN_TYPES = ("LATENT",) FUNCTION = "upscale" CATEGORY = "latent" def upscale(self, samples, upscale_method, scale_by): s = samples.copy() width = round(samples["samples"].shape[3] * scale_by) height = round(samples["samples"].shape[2] * scale_by) s["samples"] = comfy.utils.common_upscale(samples["samples"], width, height, upscale_method, "disabled") return (s,) class LatentRotate: @classmethod def INPUT_TYPES(s): return {"required": { "samples": ("LATENT",), "rotation": (["none", "90 degrees", "180 degrees", "270 degrees"],), }} RETURN_TYPES = ("LATENT",) FUNCTION = "rotate" CATEGORY = "latent/transform" def rotate(self, samples, rotation): s = samples.copy() rotate_by = 0 if rotation.startswith("90"): rotate_by = 1 elif rotation.startswith("180"): rotate_by = 2 elif rotation.startswith("270"): rotate_by = 3 s["samples"] = torch.rot90(samples["samples"], k=rotate_by, dims=[3, 2]) return (s,) class LatentFlip: @classmethod def INPUT_TYPES(s): return {"required": { "samples": ("LATENT",), "flip_method": (["x-axis: vertically", "y-axis: horizontally"],), }} RETURN_TYPES = ("LATENT",) FUNCTION = "flip" CATEGORY = "latent/transform" def flip(self, samples, flip_method): s = samples.copy() if flip_method.startswith("x"): s["samples"] = torch.flip(samples["samples"], dims=[2]) elif flip_method.startswith("y"): s["samples"] = torch.flip(samples["samples"], dims=[3]) return (s,) class LatentComposite: @classmethod def INPUT_TYPES(s): return {"required": { "samples_to": ("LATENT",), "samples_from": ("LATENT",), "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), "feather": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), }} RETURN_TYPES = ("LATENT",) FUNCTION = "composite" CATEGORY = "latent" def composite(self, samples_to, samples_from, x, y, composite_method="normal", feather=0): x = x // 8 y = y // 8 feather = feather // 8 samples_out = samples_to.copy() s = samples_to["samples"].clone() samples_to = samples_to["samples"] samples_from = samples_from["samples"] if feather == 0: s[:,:,y:y+samples_from.shape[2],x:x+samples_from.shape[3]] = samples_from[:,:,:samples_to.shape[2] - y, :samples_to.shape[3] - x] else: 
samples_from = samples_from[:,:,:samples_to.shape[2] - y, :samples_to.shape[3] - x] mask = torch.ones_like(samples_from) for t in range(feather): if y != 0: mask[:,:,t:1+t,:] *= ((1.0/feather) * (t + 1)) if y + samples_from.shape[2] < samples_to.shape[2]: mask[:,:,mask.shape[2] -1 -t: mask.shape[2]-t,:] *= ((1.0/feather) * (t + 1)) if x != 0: mask[:,:,:,t:1+t] *= ((1.0/feather) * (t + 1)) if x + samples_from.shape[3] < samples_to.shape[3]: mask[:,:,:,mask.shape[3]- 1 - t: mask.shape[3]- t] *= ((1.0/feather) * (t + 1)) rev_mask = torch.ones_like(mask) - mask s[:,:,y:y+samples_from.shape[2],x:x+samples_from.shape[3]] = samples_from[:,:,:samples_to.shape[2] - y, :samples_to.shape[3] - x] * mask + s[:,:,y:y+samples_from.shape[2],x:x+samples_from.shape[3]] * rev_mask samples_out["samples"] = s return (samples_out,) class LatentBlend: @classmethod def INPUT_TYPES(s): return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",), "blend_factor": ("FLOAT", { "default": 0.5, "min": 0, "max": 1, "step": 0.01 }), }} RETURN_TYPES = ("LATENT",) FUNCTION = "blend" CATEGORY = "_for_testing" def blend(self, samples1, samples2, blend_factor:float, blend_mode: str="normal"): samples_out = samples1.copy() samples1 = samples1["samples"] samples2 = samples2["samples"] if samples1.shape != samples2.shape: samples2.permute(0, 3, 1, 2) samples2 = comfy.utils.common_upscale(samples2, samples1.shape[3], samples1.shape[2], 'bicubic', crop='center') samples2.permute(0, 2, 3, 1) samples_blended = self.blend_mode(samples1, samples2, blend_mode) samples_blended = samples1 * blend_factor + samples_blended * (1 - blend_factor) samples_out["samples"] = samples_blended return (samples_out,) def blend_mode(self, img1, img2, mode): if mode == "normal": return img2 else: raise ValueError(f"Unsupported blend mode: {mode}") class LatentCrop: @classmethod def INPUT_TYPES(s): return {"required": { "samples": ("LATENT",), "width": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}), "height": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}), "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), }} RETURN_TYPES = ("LATENT",) FUNCTION = "crop" CATEGORY = "latent/transform" def crop(self, samples, width, height, x, y): s = samples.copy() samples = samples['samples'] x = x // 8 y = y // 8 if x > (samples.shape[3] - 8): x = samples.shape[3] - 8 if y > (samples.shape[2] - 8): y = samples.shape[2] - 8 new_height = height // 8 new_width = width // 8 to_x = new_width + x to_y = new_height + y s['samples'] = samples[:,:,y:to_y, x:to_x] return (s,) class SetLatentNoiseMask: @classmethod def INPUT_TYPES(s): return {"required": { "samples": ("LATENT",), "mask": ("MASK",), }} RETURN_TYPES = ("LATENT",) FUNCTION = "set_mask" CATEGORY = "latent/inpaint" def set_mask(self, samples, mask): s = samples.copy() s["noise_mask"] = mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])) return (s,) def common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent, denoise=1.0, disable_noise=False, start_step=None, last_step=None, force_full_denoise=False): latent_image = latent["samples"] latent_image = comfy.sample.fix_empty_latent_channels(model, latent_image) if disable_noise: noise = torch.zeros(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, device="cpu") else: batch_inds = latent["batch_index"] if "batch_index" in latent else None noise = 
comfy.sample.prepare_noise(latent_image, seed, batch_inds) noise_mask = None if "noise_mask" in latent: noise_mask = latent["noise_mask"] callback = latent_preview.prepare_callback(model, steps) disable_pbar = not comfy.utils.PROGRESS_BAR_ENABLED samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise, disable_noise=disable_noise, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, noise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed) out = latent.copy() out["samples"] = samples return (out, ) class KSampler: @classmethod def INPUT_TYPES(s): return {"required": {"model": ("MODEL",), "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), "positive": ("CONDITIONING", ), "negative": ("CONDITIONING", ), "latent_image": ("LATENT", ), "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), } } RETURN_TYPES = ("LATENT",) FUNCTION = "sample" CATEGORY = "sampling" def sample(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=1.0): return common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise) class KSamplerAdvanced: @classmethod def INPUT_TYPES(s): return {"required": {"model": ("MODEL",), "add_noise": (["enable", "disable"], ), "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), "positive": ("CONDITIONING", ), "negative": ("CONDITIONING", ), "latent_image": ("LATENT", ), "start_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}), "end_at_step": ("INT", {"default": 10000, "min": 0, "max": 10000}), "return_with_leftover_noise": (["disable", "enable"], ), } } RETURN_TYPES = ("LATENT",) FUNCTION = "sample" CATEGORY = "sampling" def sample(self, model, add_noise, noise_seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, start_at_step, end_at_step, return_with_leftover_noise, denoise=1.0): force_full_denoise = True if return_with_leftover_noise == "enable": force_full_denoise = False disable_noise = False if add_noise == "disable": disable_noise = True return common_ksampler(model, noise_seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise, disable_noise=disable_noise, start_step=start_at_step, last_step=end_at_step, force_full_denoise=force_full_denoise) class SaveImage: def __init__(self): self.output_dir = folder_paths.get_output_directory() self.type = "output" self.prefix_append = "" self.compress_level = 4 @classmethod def INPUT_TYPES(s): return {"required": {"images": ("IMAGE", ), "filename_prefix": ("STRING", {"default": "ComfyUI"})}, "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, } RETURN_TYPES = () FUNCTION = "save_images" OUTPUT_NODE = True CATEGORY = "image" def save_images(self, images, filename_prefix="ComfyUI", prompt=None, extra_pnginfo=None): filename_prefix += self.prefix_append full_output_folder, 
filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0]) results = list() for (batch_number, image) in enumerate(images): i = 255. * image.cpu().numpy() img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) metadata = None if not args.disable_metadata: metadata = PngInfo() if prompt is not None: metadata.add_text("prompt", json.dumps(prompt)) if extra_pnginfo is not None: for x in extra_pnginfo: metadata.add_text(x, json.dumps(extra_pnginfo[x])) filename_with_batch_num = filename.replace("%batch_num%", str(batch_number)) file = f"{filename_with_batch_num}_{counter:05}_.png" img.save(os.path.join(full_output_folder, file), pnginfo=metadata, compress_level=self.compress_level) results.append({ "filename": file, "subfolder": subfolder, "type": self.type }) counter += 1 return { "ui": { "images": results } } class PreviewImage(SaveImage): def __init__(self): self.output_dir = folder_paths.get_temp_directory() self.type = "temp" self.prefix_append = "_temp_" + ''.join(random.choice("abcdefghijklmnopqrstupvxyz") for x in range(5)) self.compress_level = 1 @classmethod def INPUT_TYPES(s): return {"required": {"images": ("IMAGE", ), }, "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, } class LoadImage: @classmethod def INPUT_TYPES(s): input_dir = folder_paths.get_input_directory() files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f))] return {"required": {"image": (sorted(files), {"image_upload": True})}, } CATEGORY = "image" RETURN_TYPES = ("IMAGE", "MASK") FUNCTION = "load_image" def load_image(self, image): image_path = folder_paths.get_annotated_filepath(image) img = node_helpers.pillow(Image.open, image_path) output_images = [] output_masks = [] w, h = None, None excluded_formats = ['MPO'] for i in ImageSequence.Iterator(img): i = node_helpers.pillow(ImageOps.exif_transpose, i) if i.mode == 'I': i = i.point(lambda i: i * (1 / 255)) image = i.convert("RGB") if len(output_images) == 0: w = image.size[0] h = image.size[1] if image.size[0] != w or image.size[1] != h: continue image = np.array(image).astype(np.float32) / 255.0 image = torch.from_numpy(image)[None,] if 'A' in i.getbands(): mask = np.array(i.getchannel('A')).astype(np.float32) / 255.0 mask = 1. 
- torch.from_numpy(mask) else: mask = torch.zeros((64,64), dtype=torch.float32, device="cpu") output_images.append(image) output_masks.append(mask.unsqueeze(0)) if len(output_images) > 1 and img.format not in excluded_formats: output_image = torch.cat(output_images, dim=0) output_mask = torch.cat(output_masks, dim=0) else: output_image = output_images[0] output_mask = output_masks[0] return (output_image, output_mask) @classmethod def IS_CHANGED(s, image): image_path = folder_paths.get_annotated_filepath(image) m = hashlib.sha256() with open(image_path, 'rb') as f: m.update(f.read()) return m.digest().hex() @classmethod def VALIDATE_INPUTS(s, image): if not folder_paths.exists_annotated_filepath(image): return "Invalid image file: {}".format(image) return True class LoadImageMask: _color_channels = ["alpha", "red", "green", "blue"] @classmethod def INPUT_TYPES(s): input_dir = folder_paths.get_input_directory() files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f))] return {"required": {"image": (sorted(files), {"image_upload": True}), "channel": (s._color_channels, ), } } CATEGORY = "mask" RETURN_TYPES = ("MASK",) FUNCTION = "load_image" def load_image(self, image, channel): image_path = folder_paths.get_annotated_filepath(image) i = node_helpers.pillow(Image.open, image_path) i = node_helpers.pillow(ImageOps.exif_transpose, i) if i.getbands() != ("R", "G", "B", "A"): if i.mode == 'I': i = i.point(lambda i: i * (1 / 255)) i = i.convert("RGBA") mask = None c = channel[0].upper() if c in i.getbands(): mask = np.array(i.getchannel(c)).astype(np.float32) / 255.0 mask = torch.from_numpy(mask) if c == 'A': mask = 1. - mask else: mask = torch.zeros((64,64), dtype=torch.float32, device="cpu") return (mask.unsqueeze(0),) @classmethod def IS_CHANGED(s, image, channel): image_path = folder_paths.get_annotated_filepath(image) m = hashlib.sha256() with open(image_path, 'rb') as f: m.update(f.read()) return m.digest().hex() @classmethod def VALIDATE_INPUTS(s, image): if not folder_paths.exists_annotated_filepath(image): return "Invalid image file: {}".format(image) return True class ImageScale: upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "lanczos"] crop_methods = ["disabled", "center"] @classmethod def INPUT_TYPES(s): return {"required": { "image": ("IMAGE",), "upscale_method": (s.upscale_methods,), "width": ("INT", {"default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 1}), "height": ("INT", {"default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 1}), "crop": (s.crop_methods,)}} RETURN_TYPES = ("IMAGE",) FUNCTION = "upscale" CATEGORY = "image/upscaling" def upscale(self, image, upscale_method, width, height, crop): if width == 0 and height == 0: s = image else: samples = image.movedim(-1,1) if width == 0: width = max(1, round(samples.shape[3] * height / samples.shape[2])) elif height == 0: height = max(1, round(samples.shape[2] * width / samples.shape[3])) s = comfy.utils.common_upscale(samples, width, height, upscale_method, crop) s = s.movedim(1,-1) return (s,) class ImageScaleBy: upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "lanczos"] @classmethod def INPUT_TYPES(s): return {"required": { "image": ("IMAGE",), "upscale_method": (s.upscale_methods,), "scale_by": ("FLOAT", {"default": 1.0, "min": 0.01, "max": 8.0, "step": 0.01}),}} RETURN_TYPES = ("IMAGE",) FUNCTION = "upscale" CATEGORY = "image/upscaling" def upscale(self, image, upscale_method, scale_by): samples = image.movedim(-1,1) width = round(samples.shape[3] * 
scale_by) height = round(samples.shape[2] * scale_by) s = comfy.utils.common_upscale(samples, width, height, upscale_method, "disabled") s = s.movedim(1,-1) return (s,) class ImageInvert: @classmethod def INPUT_TYPES(s): return {"required": { "image": ("IMAGE",)}} RETURN_TYPES = ("IMAGE",) FUNCTION = "invert" CATEGORY = "image" def invert(self, image): s = 1.0 - image return (s,) class ImageBatch: @classmethod def INPUT_TYPES(s): return {"required": { "image1": ("IMAGE",), "image2": ("IMAGE",)}} RETURN_TYPES = ("IMAGE",) FUNCTION = "batch" CATEGORY = "image" def batch(self, image1, image2): if image1.shape[1:] != image2.shape[1:]: image2 = comfy.utils.common_upscale(image2.movedim(-1,1), image1.shape[2], image1.shape[1], "bilinear", "center").movedim(1,-1) s = torch.cat((image1, image2), dim=0) return (s,) class EmptyImage: def __init__(self, device="cpu"): self.device = device @classmethod def INPUT_TYPES(s): return {"required": { "width": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), "height": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), "color": ("INT", {"default": 0, "min": 0, "max": 0xFFFFFF, "step": 1, "display": "color"}), }} RETURN_TYPES = ("IMAGE",) FUNCTION = "generate" CATEGORY = "image" def generate(self, width, height, batch_size=1, color=0): r = torch.full([batch_size, height, width, 1], ((color >> 16) & 0xFF) / 0xFF) g = torch.full([batch_size, height, width, 1], ((color >> 8) & 0xFF) / 0xFF) b = torch.full([batch_size, height, width, 1], ((color) & 0xFF) / 0xFF) return (torch.cat((r, g, b), dim=-1), ) class ImagePadForOutpaint: @classmethod def INPUT_TYPES(s): return { "required": { "image": ("IMAGE",), "left": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), "top": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), "right": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), "bottom": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), "feathering": ("INT", {"default": 40, "min": 0, "max": MAX_RESOLUTION, "step": 1}), } } RETURN_TYPES = ("IMAGE", "MASK") FUNCTION = "expand_image" CATEGORY = "image" def expand_image(self, image, left, top, right, bottom, feathering): d1, d2, d3, d4 = image.size() new_image = torch.ones( (d1, d2 + top + bottom, d3 + left + right, d4), dtype=torch.float32, ) * 0.5 new_image[:, top:top + d2, left:left + d3, :] = image mask = torch.ones( (d2 + top + bottom, d3 + left + right), dtype=torch.float32, ) t = torch.zeros( (d2, d3), dtype=torch.float32 ) if feathering > 0 and feathering * 2 < d2 and feathering * 2 < d3: for i in range(d2): for j in range(d3): dt = i if top != 0 else d2 db = d2 - i if bottom != 0 else d2 dl = j if left != 0 else d3 dr = d3 - j if right != 0 else d3 d = min(dt, db, dl, dr) if d >= feathering: continue v = (feathering - d) / feathering t[i, j] = v * v mask[top:top + d2, left:left + d3] = t return (new_image, mask) NODE_CLASS_MAPPINGS = { "KSampler": KSampler, "CheckpointLoaderSimple": CheckpointLoaderSimple, "CLIPTextEncode": CLIPTextEncode, "CLIPSetLastLayer": CLIPSetLastLayer, "VAEDecode": VAEDecode, "VAEEncode": VAEEncode, "VAEEncodeForInpaint": VAEEncodeForInpaint, "VAELoader": VAELoader, "EmptyLatentImage": EmptyLatentImage, "LatentUpscale": LatentUpscale, "LatentUpscaleBy": LatentUpscaleBy, "LatentFromBatch": LatentFromBatch, "RepeatLatentBatch": RepeatLatentBatch, "SaveImage": SaveImage, "PreviewImage": PreviewImage, "LoadImage": 
LoadImage, "LoadImageMask": LoadImageMask, "ImageScale": ImageScale, "ImageScaleBy": ImageScaleBy, "ImageInvert": ImageInvert, "ImageBatch": ImageBatch, "ImagePadForOutpaint": ImagePadForOutpaint, "EmptyImage": EmptyImage, "ConditioningAverage": ConditioningAverage , "ConditioningCombine": ConditioningCombine, "ConditioningConcat": ConditioningConcat, "ConditioningSetArea": ConditioningSetArea, "ConditioningSetAreaPercentage": ConditioningSetAreaPercentage, "ConditioningSetAreaStrength": ConditioningSetAreaStrength, "ConditioningSetMask": ConditioningSetMask, "KSamplerAdvanced": KSamplerAdvanced, "SetLatentNoiseMask": SetLatentNoiseMask, "LatentComposite": LatentComposite, "LatentBlend": LatentBlend, "LatentRotate": LatentRotate, "LatentFlip": LatentFlip, "LatentCrop": LatentCrop, "LoraLoader": LoraLoader, "CLIPLoader": CLIPLoader, "UNETLoader": UNETLoader, "DualCLIPLoader": DualCLIPLoader, "CLIPVisionEncode": CLIPVisionEncode, "StyleModelApply": StyleModelApply, "unCLIPConditioning": unCLIPConditioning, "ControlNetApply": ControlNetApply, "ControlNetApplyAdvanced": ControlNetApplyAdvanced, "ControlNetLoader": ControlNetLoader, "DiffControlNetLoader": DiffControlNetLoader, "StyleModelLoader": StyleModelLoader, "CLIPVisionLoader": CLIPVisionLoader, "VAEDecodeTiled": VAEDecodeTiled, "VAEEncodeTiled": VAEEncodeTiled, "unCLIPCheckpointLoader": unCLIPCheckpointLoader, "GLIGENLoader": GLIGENLoader, "GLIGENTextBoxApply": GLIGENTextBoxApply, "InpaintModelConditioning": InpaintModelConditioning, "CheckpointLoader": CheckpointLoader, "DiffusersLoader": DiffusersLoader, "LoadLatent": LoadLatent, "SaveLatent": SaveLatent, "ConditioningZeroOut": ConditioningZeroOut, "ConditioningSetTimestepRange": ConditioningSetTimestepRange, "LoraLoaderModelOnly": LoraLoaderModelOnly, } NODE_DISPLAY_NAME_MAPPINGS = { "KSampler": "KSampler", "KSamplerAdvanced": "KSampler (Advanced)", "CheckpointLoader": "Load Checkpoint With Config (DEPRECATED)", "CheckpointLoaderSimple": "Load Checkpoint", "VAELoader": "Load VAE", "LoraLoader": "Load LoRA", "CLIPLoader": "Load CLIP", "ControlNetLoader": "Load ControlNet Model", "DiffControlNetLoader": "Load ControlNet Model (diff)", "StyleModelLoader": "Load Style Model", "CLIPVisionLoader": "Load CLIP Vision", "UpscaleModelLoader": "Load Upscale Model", "CLIPVisionEncode": "CLIP Vision Encode", "StyleModelApply": "Apply Style Model", "CLIPTextEncode": "CLIP Text Encode (Prompt)", "CLIPSetLastLayer": "CLIP Set Last Layer", "ConditioningCombine": "Conditioning (Combine)", "ConditioningAverage ": "Conditioning (Average)", "ConditioningConcat": "Conditioning (Concat)", "ConditioningSetArea": "Conditioning (Set Area)", "ConditioningSetAreaPercentage": "Conditioning (Set Area with Percentage)", "ConditioningSetMask": "Conditioning (Set Mask)", "ControlNetApply": "Apply ControlNet", "ControlNetApplyAdvanced": "Apply ControlNet (Advanced)", "VAEEncodeForInpaint": "VAE Encode (for Inpainting)", "SetLatentNoiseMask": "Set Latent Noise Mask", "VAEDecode": "VAE Decode", "VAEEncode": "VAE Encode", "LatentRotate": "Rotate Latent", "LatentFlip": "Flip Latent", "LatentCrop": "Crop Latent", "EmptyLatentImage": "Empty Latent Image", "LatentUpscale": "Upscale Latent", "LatentUpscaleBy": "Upscale Latent By", "LatentComposite": "Latent Composite", "LatentBlend": "Latent Blend", "LatentFromBatch" : "Latent From Batch", "RepeatLatentBatch": "Repeat Latent Batch", "SaveImage": "Save Image", "PreviewImage": "Preview Image", "LoadImage": "Load Image", "LoadImageMask": "Load Image (as Mask)", "ImageScale": 
"Upscale Image", "ImageScaleBy": "Upscale Image By", "ImageUpscaleWithModel": "Upscale Image (using Model)", "ImageInvert": "Invert Image", "ImagePadForOutpaint": "Pad Image for Outpainting", "ImageBatch": "Batch Images", "VAEDecodeTiled": "VAE Decode (Tiled)", "VAEEncodeTiled": "VAE Encode (Tiled)", } EXTENSION_WEB_DIRS = {} def load_custom_node(module_path, ignore=set()): module_name = os.path.basename(module_path) if os.path.isfile(module_path): sp = os.path.splitext(module_path) module_name = sp[0] try: logging.debug("Trying to load custom node {}".format(module_path)) if os.path.isfile(module_path): module_spec = importlib.util.spec_from_file_location(module_name, module_path) module_dir = os.path.split(module_path)[0] else: module_spec = importlib.util.spec_from_file_location(module_name, os.path.join(module_path, "__init__.py")) module_dir = module_path module = importlib.util.module_from_spec(module_spec) sys.modules[module_name] = module module_spec.loader.exec_module(module) if hasattr(module, "WEB_DIRECTORY") and getattr(module, "WEB_DIRECTORY") is not None: web_dir = os.path.abspath(os.path.join(module_dir, getattr(module, "WEB_DIRECTORY"))) if os.path.isdir(web_dir): EXTENSION_WEB_DIRS[module_name] = web_dir if hasattr(module, "NODE_CLASS_MAPPINGS") and getattr(module, "NODE_CLASS_MAPPINGS") is not None: for name in module.NODE_CLASS_MAPPINGS: if name not in ignore: NODE_CLASS_MAPPINGS[name] = module.NODE_CLASS_MAPPINGS[name] if hasattr(module, "NODE_DISPLAY_NAME_MAPPINGS") and getattr(module, "NODE_DISPLAY_NAME_MAPPINGS") is not None: NODE_DISPLAY_NAME_MAPPINGS.update(module.NODE_DISPLAY_NAME_MAPPINGS) return True else: logging.warning(f"Skip {module_path} module for custom nodes due to the lack of NODE_CLASS_MAPPINGS.") return False except Exception as e: logging.warning(traceback.format_exc()) logging.warning(f"Cannot import {module_path} module for custom nodes: {e}") return False def load_custom_nodes(): base_node_names = set(NODE_CLASS_MAPPINGS.keys()) node_paths = folder_paths.get_folder_paths("custom_nodes") node_import_times = [] for custom_node_path in node_paths: possible_modules = os.listdir(os.path.realpath(custom_node_path)) if "__pycache__" in possible_modules: possible_modules.remove("__pycache__") for possible_module in possible_modules: module_path = os.path.join(custom_node_path, possible_module) if os.path.isfile(module_path) and os.path.splitext(module_path)[1] != ".py": continue if module_path.endswith(".disabled"): continue time_before = time.perf_counter() success = load_custom_node(module_path, base_node_names) node_import_times.append((time.perf_counter() - time_before, module_path, success)) if len(node_import_times) > 0: logging.info("\nImport times for custom nodes:") for n in sorted(node_import_times): if n[2]: import_message = "" else: import_message = " (IMPORT FAILED)" logging.info("{:6.1f} seconds{}: {}".format(n[0], import_message, n[1])) logging.info("") def init_custom_nodes(): extras_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "comfy_extras") extras_files = [ "nodes_latent.py", "nodes_hypernetwork.py", "nodes_upscale_model.py", "nodes_post_processing.py", "nodes_mask.py", "nodes_compositing.py", "nodes_rebatch.py", "nodes_model_merging.py", "nodes_tomesd.py", "nodes_clip_sdxl.py", "nodes_canny.py", "nodes_freelunch.py", "nodes_custom_sampler.py", "nodes_hypertile.py", "nodes_model_advanced.py", "nodes_model_downscale.py", "nodes_images.py", "nodes_video_model.py", "nodes_sag.py", "nodes_perpneg.py", "nodes_stable3d.py", 
"nodes_sdupscale.py", "nodes_photomaker.py", "nodes_cond.py", "nodes_morphology.py", "nodes_stable_cascade.py", "nodes_differential_diffusion.py", "nodes_ip2p.py", "nodes_model_merging_model_specific.py", "nodes_pag.py", "nodes_align_your_steps.py", "nodes_attention_multiply.py", "nodes_advanced_samplers.py", "nodes_webcam.py", "nodes_audio.py", "nodes_sd3.py", "nodes_gits.py", ] import_failed = [] for node_file in extras_files: if not load_custom_node(os.path.join(extras_dir, node_file)): import_failed.append(node_file) load_custom_nodes() if len(import_failed) > 0: logging.warning("WARNING: some comfy_extras/ nodes did not import correctly. This may be because they are missing some dependencies.\n") for node in import_failed: logging.warning("IMPORT FAILED: {}".format(node)) logging.warning("\nThis issue might be caused by new missing dependencies added the last time you updated ComfyUI.") if args.windows_standalone_build: logging.warning("Please run the update script: update/update_comfyui.bat") else: logging.warning("Please do a: pip install -r requirements.txt") logging.warning("")
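# --- Illustrative sketch, not part of ComfyUI itself: a hypothetical
# custom_nodes/example_node.py that load_custom_node() above would discover.
# The loader imports the module and copies whatever it exposes in the
# module-level NODE_CLASS_MAPPINGS / NODE_DISPLAY_NAME_MAPPINGS dicts into the
# global tables; the node class follows the same INPUT_TYPES / RETURN_TYPES /
# FUNCTION / CATEGORY conventions as the built-in nodes above. All names here
# are made up for the example.

class ExampleBrightness:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"image": ("IMAGE",),
                             "offset": ("FLOAT", {"default": 0.0, "min": -1.0, "max": 1.0, "step": 0.01})}}

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "adjust"
    CATEGORY = "image/example"

    def adjust(self, image, offset):
        # IMAGE tensors are batched [B, H, W, C] floats in 0..1 (see LoadImage /
        # SaveImage above), so a plain add-and-clamp is enough for this toy node.
        return ((image + offset).clamp(0.0, 1.0),)

# These two module-level dicts are what load_custom_node() looks for.
NODE_CLASS_MAPPINGS = {"ExampleBrightness": ExampleBrightness}
NODE_DISPLAY_NAME_MAPPINGS = {"ExampleBrightness": "Example Brightness"}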
from PIL import ImageFile, UnidentifiedImageError def conditioning_set_values(conditioning, values={}): c = [] for t in conditioning: n = [t[0], t[1].copy()] for k in values: n[1][k] = values[k] c.append(n) return c def pillow(fn, arg): prev_value = None try: x = fn(arg) except (OSError, UnidentifiedImageError, ValueError): prev_value = ImageFile.LOAD_TRUNCATED_IMAGES ImageFile.LOAD_TRUNCATED_IMAGES = True x = fn(arg) finally: if prev_value is not None: ImageFile.LOAD_TRUNCATED_IMAGES = prev_value return x
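# --- Illustrative usage of the helpers above (not part of ComfyUI itself).
# conditioning_set_values() treats conditioning as a list of
# [cond_tensor, options_dict] pairs and returns a new list in which each
# options dict is copied and updated with the given keys -- the same pattern
# InpaintModelConditioning uses to attach "concat_latent_image"/"concat_mask".
# "example_strength" below is a made-up key, purely to show the call shape.
import node_helpers
from PIL import Image

def tag_conditioning(conditioning, strength):
    return node_helpers.conditioning_set_values(conditioning, {"example_strength": strength})

def load_image_tolerant(path):
    # pillow() retries the call with LOAD_TRUNCATED_IMAGES enabled, so a
    # slightly corrupted file still opens instead of raising.
    return node_helpers.pillow(Image.open, path)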
ComfyUI ======= The most powerful and modular stable diffusion GUI and backend. ----------- ![ComfyUI Screenshot](comfyui_screenshot.png) This ui will let you design and execute advanced stable diffusion pipelines using a graph/nodes/flowchart based interface. For some workflow examples and see what ComfyUI can do you can check out: - Nodes/graph/flowchart interface to experiment and create complex Stable Diffusion workflows without needing to code anything. - Fully supports SD1.x, SD2.x, [SDXL](https: - Asynchronous Queue system - Many optimizations: Only re-executes the parts of the workflow that changes between executions. - Command line option: ```--lowvram``` to make it work on GPUs with less than 3GB vram (enabled automatically on GPUs with low vram) - Works even if you don't have a GPU with: ```--cpu``` (slow) - Can load ckpt, safetensors and diffusers models/checkpoints. Standalone VAEs and CLIP models. - Embeddings/Textual inversion - [Loras (regular, locon and loha)](https: - [Hypernetworks](https: - Loading full workflows (with seeds) from generated PNG files. - Saving/Loading workflows as Json files. - Nodes interface can be used to create complex workflows like one for [Hires fix](https: - [Area Composition](https: - [Inpainting](https: - [ControlNet and T2I-Adapter](https: - [Upscale Models (ESRGAN, ESRGAN variants, SwinIR, Swin2SR, etc...)](https: - [unCLIP Models](https: - [GLIGEN](https: - [Model Merging](https: - [LCM models and Loras](https: - [SDXL Turbo](https: - Latent previews with [TAESD]( - Starts up very fast. - Works fully offline: will never download anything. - [Config file](extra_model_paths.yaml.example) to set the search paths for models. Workflow examples can be found on the [Examples page](https: | Keybind | Explanation | |------------------------------------|--------------------------------------------------------------------------------------------------------------------| | Ctrl + Enter | Queue up current graph for generation | | Ctrl + Shift + Enter | Queue up current graph as first for generation | | Ctrl + Z/Ctrl + Y | Undo/Redo | | Ctrl + S | Save workflow | | Ctrl + O | Load workflow | | Ctrl + A | Select all nodes | | Alt + C | Collapse/uncollapse selected nodes | | Ctrl + M | Mute/unmute selected nodes | | Ctrl + B | Bypass selected nodes (acts like the node was removed from the graph and the wires reconnected through) | | Delete/Backspace | Delete selected nodes | | Ctrl + Backspace | Delete the current graph | | Space | Move the canvas around when held and moving the cursor | | Ctrl/Shift + Click | Add clicked node to selection | | Ctrl + C/Ctrl + V | Copy and paste selected nodes (without maintaining connections to outputs of unselected nodes) | | Ctrl + C/Ctrl + Shift + V | Copy and paste selected nodes (maintaining connections from outputs of unselected nodes to inputs of pasted nodes) | | Shift + Drag | Move multiple selected nodes at the same time | | Ctrl + D | Load default graph | | Alt + `+` | Canvas Zoom in | | Alt + `-` | Canvas Zoom out | | Ctrl + Shift + LMB + Vertical drag | Canvas Zoom in/out | | Q | Toggle visibility of the queue | | H | Toggle visibility of history | | R | Refresh graph | | Double-Click LMB | Open node quick search palette | Ctrl can also be replaced with Cmd instead for macOS users There is a portable standalone build for Windows that should work for running on Nvidia GPUs or for running on your CPU only on the [releases page](https: Simply download, extract with [7-Zip](https: If you have trouble extracting 
it, right click the file -> properties -> unblock See the [Config file](extra_model_paths.yaml.example) to set the search paths for models. In the standalone windows build you can find this file in the ComfyUI directory. Rename this file to extra_model_paths.yaml and edit it with your favorite text editor. To run it on services like paperspace, kaggle or colab you can use my [Jupyter Notebook](notebooks/comfyui_colab.ipynb) Git clone this repo. Put your SD checkpoints (the huge ckpt/safetensors files) in: models/checkpoints Put your VAE in: models/vae AMD users can install rocm and pytorch with pip if you don't have it already installed, this is the command to install the stable version: ```pip install torch torchvision torchaudio --index-url https: This is the command to install the nightly with ROCm 6.0 which might have some performance improvements: ```pip install --pre torch torchvision torchaudio --index-url https: Nvidia users should install stable pytorch using this command: ```pip install torch torchvision torchaudio --extra-index-url https: This is the command to install pytorch nightly instead which might have performance improvements: ```pip install --pre torch torchvision torchaudio --index-url https: If you get the "Torch not compiled with CUDA enabled" error, uninstall torch with: ```pip uninstall torch``` And install it again with the command above. Install the dependencies by opening your terminal inside the ComfyUI folder and: ```pip install -r requirements.txt``` After this you should have everything installed and can proceed to running ComfyUI. Intel GPU support is available for all Intel GPUs supported by Intel's Extension for Pytorch (IPEX) with the support requirements listed in the [Installation](https: 1. Start by installing the drivers or kernel listed or newer in the Installation page of IPEX linked above for Windows and Linux if needed. 1. Follow the instructions to install [Intel's oneAPI Basekit](https: 1. Install the packages for IPEX using the instructions provided in the Installation page for your platform. 1. Follow the [ComfyUI manual installation]( Additional discussion and help can be found [here](https: You can install ComfyUI in Apple Mac silicon (M1 or M2) with any recent macOS version. 1. Install pytorch nightly. For instructions, read the [Accelerated PyTorch training on Mac](https: 1. Follow the [ComfyUI manual installation]( 1. Install the ComfyUI [dependencies]( 1. Launch ComfyUI by running `python main.py` > **Note**: Remember to add your models, VAE, LoRAs etc. to the corresponding Comfy folders, as discussed in [ComfyUI manual installation]( ```pip install torch-directml``` Then you can launch ComfyUI with: ```python main.py --directml``` You don't. If you have another UI installed and working with its own python venv you can use that venv to run ComfyUI. You can open up your favorite terminal and activate it: ```source path_to_other_sd_gui/venv/bin/activate``` or on Windows: With Powershell: ```"path_to_other_sd_gui\venv\Scripts\Activate.ps1"``` With cmd.exe: ```"path_to_other_sd_gui\venv\Scripts\activate.bat"``` And then you can use that terminal to run ComfyUI without installing any dependencies. Note that the venv folder might be called something else depending on the SD UI. 
```python main.py``` Try running it with this command if you have issues: For 6700, 6600 and maybe other RDNA2 or older: ```HSA_OVERRIDE_GFX_VERSION=10.3.0 python main.py``` For AMD 7600 and maybe other RDNA3 cards: ```HSA_OVERRIDE_GFX_VERSION=11.0.0 python main.py``` Only parts of the graph that have an output with all the correct inputs will be executed. Only parts of the graph that change from each execution to the next will be executed, if you submit the same graph twice only the first will be executed. If you change the last part of the graph only the part you changed and the part that depends on it will be executed. Dragging a generated png on the webpage or loading one will give you the full workflow including seeds that were used to create it. You can use () to change emphasis of a word or phrase like: (good code:1.2) or (bad code:0.8). The default emphasis for () is 1.1. To use () characters in your actual prompt escape them like \\( or \\). You can use {day|night}, for wildcard/dynamic prompts. With this syntax "{wild|card|test}" will be randomly replaced by either "wild", "card" or "test" by the frontend every time you queue the prompt. To use {} characters in your actual prompt escape them like: \\{ or \\}. Dynamic prompts also support C-style comments, like ` To use a textual inversion concepts/embeddings in a text prompt put them in the models/embeddings directory and use them in the CLIPTextEncode node like this (you can omit the .pt extension): ```embedding:embedding_filename.pt``` Use ```--preview-method auto``` to enable previews. The default installation includes a fast latent preview method that's low-resolution. To enable higher-quality previews with [TAESD](https: Generate a self-signed certificate (not appropriate for shared/production use) and key by running the command: `openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -sha256 -days 3650 -nodes -subj "/C=XX/ST=StateName/L=CityName/O=CompanyName/OU=CompanySectionName/CN=CommonNameOrHostname"` Use `--tls-keyfile key.pem --tls-certfile cert.pem` to enable TLS/SSL, the app will now be accessible with `https: > Note: Windows users can use [alexisrolland/docker-openssl](https: <br/><br/>If you use a container, note that the volume mount `-v` can be a relative path so `... -v ".\:/openssl-certs" ...` would create the key & cert files in the current directory of your command prompt or powershell terminal. [Matrix space: I wanted to learn how Stable Diffusion worked in detail. I also wanted something clean and powerful that would let me experiment with SD without restrictions. This is for anyone that wants to make complex workflows with SD or that wants to learn more how SD works. The interface follows closely how SD works and the code should be much more simple to understand than other SD UIs.
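The `{day|night}` wildcard substitution described above is performed by the frontend each time a prompt is queued. As a rough Python sketch of the same idea (illustrative only, not the actual frontend code, and ignoring the escaped `\{`/`\}` forms), each `{...}` group is simply replaced by one of its `|`-separated options:

```
import random
import re

def expand_dynamic_prompt(text, rng=random):
    # Resolve innermost {...} groups first so nested groups also work;
    # handling of escaped \{ and \} is deliberately omitted from this sketch.
    pattern = re.compile(r"\{([^{}]*)\}")
    while True:
        replaced = pattern.sub(lambda m: rng.choice(m.group(1).split("|")), text)
        if replaced == text:
            return replaced
        text = replaced

print(expand_dynamic_prompt("a photo of a city at {day|night|dusk}"))
```

Running the last line prints one of the three variants at random, which is the behaviour the queueing step applies to the whole prompt.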
import os import sys import asyncio import traceback import nodes import folder_paths import execution import uuid import urllib import json import glob import struct import ssl from PIL import Image, ImageOps from PIL.PngImagePlugin import PngInfo from io import BytesIO import aiohttp from aiohttp import web import logging import mimetypes from comfy.cli_args import args import comfy.utils import comfy.model_management from app.user_manager import UserManager class BinaryEventTypes: PREVIEW_IMAGE = 1 UNENCODED_PREVIEW_IMAGE = 2 async def send_socket_catch_exception(function, message): try: await function(message) except (aiohttp.ClientError, aiohttp.ClientPayloadError, ConnectionResetError) as err: logging.warning("send error: {}".format(err)) @web.middleware async def cache_control(request: web.Request, handler): response: web.Response = await handler(request) if request.path.endswith('.js') or request.path.endswith('.css'): response.headers.setdefault('Cache-Control', 'no-cache') return response def create_cors_middleware(allowed_origin: str): @web.middleware async def cors_middleware(request: web.Request, handler): if request.method == "OPTIONS": response = web.Response() else: response = await handler(request) response.headers['Access-Control-Allow-Origin'] = allowed_origin response.headers['Access-Control-Allow-Methods'] = 'POST, GET, DELETE, PUT, OPTIONS' response.headers['Access-Control-Allow-Headers'] = 'Content-Type, Authorization' response.headers['Access-Control-Allow-Credentials'] = 'true' return response return cors_middleware class PromptServer(): def __init__(self, loop): PromptServer.instance = self mimetypes.init() mimetypes.types_map['.js'] = 'application/javascript; charset=utf-8' self.user_manager = UserManager() self.supports = ["custom_nodes_from_web"] self.prompt_queue = None self.loop = loop self.messages = asyncio.Queue() self.number = 0 middlewares = [cache_control] if args.enable_cors_header: middlewares.append(create_cors_middleware(args.enable_cors_header)) max_upload_size = round(args.max_upload_size * 1024 * 1024) self.app = web.Application(client_max_size=max_upload_size, middlewares=middlewares) self.sockets = dict() self.web_root = os.path.join(os.path.dirname( os.path.realpath(__file__)), "web") routes = web.RouteTableDef() self.routes = routes self.last_node_id = None self.client_id = None self.on_prompt_handlers = [] @routes.get('/ws') async def websocket_handler(request): ws = web.WebSocketResponse() await ws.prepare(request) sid = request.rel_url.query.get('clientId', '') if sid: self.sockets.pop(sid, None) else: sid = uuid.uuid4().hex self.sockets[sid] = ws try: await self.send("status", { "status": self.get_queue_info(), 'sid': sid }, sid) if self.client_id == sid and self.last_node_id is not None: await self.send("executing", { "node": self.last_node_id }, sid) async for msg in ws: if msg.type == aiohttp.WSMsgType.ERROR: logging.warning('ws connection closed with exception %s' % ws.exception()) finally: self.sockets.pop(sid, None) return ws @routes.get("/") async def get_root(request): return web.FileResponse(os.path.join(self.web_root, "index.html")) @routes.get("/embeddings") def get_embeddings(self): embeddings = folder_paths.get_filename_list("embeddings") return web.json_response(list(map(lambda a: os.path.splitext(a)[0], embeddings))) @routes.get("/extensions") async def get_extensions(request): files = glob.glob(os.path.join( glob.escape(self.web_root), 'extensions*.js'), recursive=True) extensions = list(map(lambda f: "/" + 
os.path.relpath(f, self.web_root).replace("\\", "/"), files)) for name, dir in nodes.EXTENSION_WEB_DIRS.items(): files = glob.glob(os.path.join(glob.escape(dir), '**/*.js'), recursive=True) extensions.extend(list(map(lambda f: "/extensions/" + urllib.parse.quote( name) + "/" + os.path.relpath(f, dir).replace("\\", "/"), files))) return web.json_response(extensions) def get_dir_by_type(dir_type): if dir_type is None: dir_type = "input" if dir_type == "input": type_dir = folder_paths.get_input_directory() elif dir_type == "temp": type_dir = folder_paths.get_temp_directory() elif dir_type == "output": type_dir = folder_paths.get_output_directory() return type_dir, dir_type def image_upload(post, image_save_function=None): image = post.get("image") overwrite = post.get("overwrite") image_upload_type = post.get("type") upload_dir, image_upload_type = get_dir_by_type(image_upload_type) if image and image.file: filename = image.filename if not filename: return web.Response(status=400) subfolder = post.get("subfolder", "") full_output_folder = os.path.join(upload_dir, os.path.normpath(subfolder)) filepath = os.path.abspath(os.path.join(full_output_folder, filename)) if os.path.commonpath((upload_dir, filepath)) != upload_dir: return web.Response(status=400) if not os.path.exists(full_output_folder): os.makedirs(full_output_folder) split = os.path.splitext(filename) if overwrite is not None and (overwrite == "true" or overwrite == "1"): pass else: i = 1 while os.path.exists(filepath): filename = f"{split[0]} ({i}){split[1]}" filepath = os.path.join(full_output_folder, filename) i += 1 if image_save_function is not None: image_save_function(image, post, filepath) else: with open(filepath, "wb") as f: f.write(image.file.read()) return web.json_response({"name" : filename, "subfolder": subfolder, "type": image_upload_type}) else: return web.Response(status=400) @routes.post("/upload/image") async def upload_image(request): post = await request.post() return image_upload(post) @routes.post("/upload/mask") async def upload_mask(request): post = await request.post() def image_save_function(image, post, filepath): original_ref = json.loads(post.get("original_ref")) filename, output_dir = folder_paths.annotated_filepath(original_ref['filename']) if filename[0] == '/' or '..' in filename: return web.Response(status=400) if output_dir is None: type = original_ref.get("type", "output") output_dir = folder_paths.get_directory_by_type(type) if output_dir is None: return web.Response(status=400) if original_ref.get("subfolder", "") != "": full_output_dir = os.path.join(output_dir, original_ref["subfolder"]) if os.path.commonpath((os.path.abspath(full_output_dir), output_dir)) != output_dir: return web.Response(status=403) output_dir = full_output_dir file = os.path.join(output_dir, filename) if os.path.isfile(file): with Image.open(file) as original_pil: metadata = PngInfo() if hasattr(original_pil,'text'): for key in original_pil.text: metadata.add_text(key, original_pil.text[key]) original_pil = original_pil.convert('RGBA') mask_pil = Image.open(image.file).convert('RGBA') new_alpha = mask_pil.getchannel('A') original_pil.putalpha(new_alpha) original_pil.save(filepath, compress_level=4, pnginfo=metadata) return image_upload(post, image_save_function) @routes.get("/view") async def view_image(request): if "filename" in request.rel_url.query: filename = request.rel_url.query["filename"] filename,output_dir = folder_paths.annotated_filepath(filename) if filename[0] == '/' or '..' 
in filename: return web.Response(status=400) if output_dir is None: type = request.rel_url.query.get("type", "output") output_dir = folder_paths.get_directory_by_type(type) if output_dir is None: return web.Response(status=400) if "subfolder" in request.rel_url.query: full_output_dir = os.path.join(output_dir, request.rel_url.query["subfolder"]) if os.path.commonpath((os.path.abspath(full_output_dir), output_dir)) != output_dir: return web.Response(status=403) output_dir = full_output_dir filename = os.path.basename(filename) file = os.path.join(output_dir, filename) if os.path.isfile(file): if 'preview' in request.rel_url.query: with Image.open(file) as img: preview_info = request.rel_url.query['preview'].split(';') image_format = preview_info[0] if image_format not in ['webp', 'jpeg'] or 'a' in request.rel_url.query.get('channel', ''): image_format = 'webp' quality = 90 if preview_info[-1].isdigit(): quality = int(preview_info[-1]) buffer = BytesIO() if image_format in ['jpeg'] or request.rel_url.query.get('channel', '') == 'rgb': img = img.convert("RGB") img.save(buffer, format=image_format, quality=quality) buffer.seek(0) return web.Response(body=buffer.read(), content_type=f'image/{image_format}', headers={"Content-Disposition": f"filename=\"{filename}\""}) if 'channel' not in request.rel_url.query: channel = 'rgba' else: channel = request.rel_url.query["channel"] if channel == 'rgb': with Image.open(file) as img: if img.mode == "RGBA": r, g, b, a = img.split() new_img = Image.merge('RGB', (r, g, b)) else: new_img = img.convert("RGB") buffer = BytesIO() new_img.save(buffer, format='PNG') buffer.seek(0) return web.Response(body=buffer.read(), content_type='image/png', headers={"Content-Disposition": f"filename=\"{filename}\""}) elif channel == 'a': with Image.open(file) as img: if img.mode == "RGBA": _, _, _, a = img.split() else: a = Image.new('L', img.size, 255) alpha_img = Image.new('RGBA', img.size) alpha_img.putalpha(a) alpha_buffer = BytesIO() alpha_img.save(alpha_buffer, format='PNG') alpha_buffer.seek(0) return web.Response(body=alpha_buffer.read(), content_type='image/png', headers={"Content-Disposition": f"filename=\"{filename}\""}) else: return web.FileResponse(file, headers={"Content-Disposition": f"filename=\"{filename}\""}) return web.Response(status=404) @routes.get("/view_metadata/{folder_name}") async def view_metadata(request): folder_name = request.match_info.get("folder_name", None) if folder_name is None: return web.Response(status=404) if not "filename" in request.rel_url.query: return web.Response(status=404) filename = request.rel_url.query["filename"] if not filename.endswith(".safetensors"): return web.Response(status=404) safetensors_path = folder_paths.get_full_path(folder_name, filename) if safetensors_path is None: return web.Response(status=404) out = comfy.utils.safetensors_header(safetensors_path, max_size=1024*1024) if out is None: return web.Response(status=404) dt = json.loads(out) if not "__metadata__" in dt: return web.Response(status=404) return web.json_response(dt["__metadata__"]) @routes.get("/system_stats") async def get_queue(request): device = comfy.model_management.get_torch_device() device_name = comfy.model_management.get_torch_device_name(device) vram_total, torch_vram_total = comfy.model_management.get_total_memory(device, torch_total_too=True) vram_free, torch_vram_free = comfy.model_management.get_free_memory(device, torch_free_too=True) system_stats = { "system": { "os": os.name, "python_version": sys.version, "embedded_python": 
os.path.split(os.path.split(sys.executable)[0])[1] == "python_embeded" }, "devices": [ { "name": device_name, "type": device.type, "index": device.index, "vram_total": vram_total, "vram_free": vram_free, "torch_vram_total": torch_vram_total, "torch_vram_free": torch_vram_free, } ] } return web.json_response(system_stats) @routes.get("/prompt") async def get_prompt(request): return web.json_response(self.get_queue_info()) def node_info(node_class): obj_class = nodes.NODE_CLASS_MAPPINGS[node_class] info = {} info['input'] = obj_class.INPUT_TYPES() info['output'] = obj_class.RETURN_TYPES info['output_is_list'] = obj_class.OUTPUT_IS_LIST if hasattr(obj_class, 'OUTPUT_IS_LIST') else [False] * len(obj_class.RETURN_TYPES) info['output_name'] = obj_class.RETURN_NAMES if hasattr(obj_class, 'RETURN_NAMES') else info['output'] info['name'] = node_class info['display_name'] = nodes.NODE_DISPLAY_NAME_MAPPINGS[node_class] if node_class in nodes.NODE_DISPLAY_NAME_MAPPINGS.keys() else node_class info['description'] = obj_class.DESCRIPTION if hasattr(obj_class,'DESCRIPTION') else '' info['category'] = 'sd' if hasattr(obj_class, 'OUTPUT_NODE') and obj_class.OUTPUT_NODE == True: info['output_node'] = True else: info['output_node'] = False if hasattr(obj_class, 'CATEGORY'): info['category'] = obj_class.CATEGORY return info @routes.get("/object_info") async def get_object_info(request): out = {} for x in nodes.NODE_CLASS_MAPPINGS: try: out[x] = node_info(x) except Exception as e: logging.error(f"[ERROR] An error occurred while retrieving information for the '{x}' node.") logging.error(traceback.format_exc()) return web.json_response(out) @routes.get("/object_info/{node_class}") async def get_object_info_node(request): node_class = request.match_info.get("node_class", None) out = {} if (node_class is not None) and (node_class in nodes.NODE_CLASS_MAPPINGS): out[node_class] = node_info(node_class) return web.json_response(out) @routes.get("/history") async def get_history(request): max_items = request.rel_url.query.get("max_items", None) if max_items is not None: max_items = int(max_items) return web.json_response(self.prompt_queue.get_history(max_items=max_items)) @routes.get("/history/{prompt_id}") async def get_history(request): prompt_id = request.match_info.get("prompt_id", None) return web.json_response(self.prompt_queue.get_history(prompt_id=prompt_id)) @routes.get("/queue") async def get_queue(request): queue_info = {} current_queue = self.prompt_queue.get_current_queue() queue_info['queue_running'] = current_queue[0] queue_info['queue_pending'] = current_queue[1] return web.json_response(queue_info) @routes.post("/prompt") async def post_prompt(request): logging.info("got prompt") resp_code = 200 out_string = "" json_data = await request.json() json_data = self.trigger_on_prompt(json_data) if "number" in json_data: number = float(json_data['number']) else: number = self.number if "front" in json_data: if json_data['front']: number = -number self.number += 1 if "prompt" in json_data: prompt = json_data["prompt"] valid = execution.validate_prompt(prompt) extra_data = {} if "extra_data" in json_data: extra_data = json_data["extra_data"] if "client_id" in json_data: extra_data["client_id"] = json_data["client_id"] if valid[0]: prompt_id = str(uuid.uuid4()) outputs_to_execute = valid[2] self.prompt_queue.put((number, prompt_id, prompt, extra_data, outputs_to_execute)) response = {"prompt_id": prompt_id, "number": number, "node_errors": valid[3]} return web.json_response(response) else: 
logging.warning("invalid prompt: {}".format(valid[1])) return web.json_response({"error": valid[1], "node_errors": valid[3]}, status=400) else: return web.json_response({"error": "no prompt", "node_errors": []}, status=400) @routes.post("/queue") async def post_queue(request): json_data = await request.json() if "clear" in json_data: if json_data["clear"]: self.prompt_queue.wipe_queue() if "delete" in json_data: to_delete = json_data['delete'] for id_to_delete in to_delete: delete_func = lambda a: a[1] == id_to_delete self.prompt_queue.delete_queue_item(delete_func) return web.Response(status=200) @routes.post("/interrupt") async def post_interrupt(request): nodes.interrupt_processing() return web.Response(status=200) @routes.post("/free") async def post_free(request): json_data = await request.json() unload_models = json_data.get("unload_models", False) free_memory = json_data.get("free_memory", False) if unload_models: self.prompt_queue.set_flag("unload_models", unload_models) if free_memory: self.prompt_queue.set_flag("free_memory", free_memory) return web.Response(status=200) @routes.post("/history") async def post_history(request): json_data = await request.json() if "clear" in json_data: if json_data["clear"]: self.prompt_queue.wipe_history() if "delete" in json_data: to_delete = json_data['delete'] for id_to_delete in to_delete: self.prompt_queue.delete_history_item(id_to_delete) return web.Response(status=200) def add_routes(self): self.user_manager.add_routes(self.routes) api_routes = web.RouteTableDef() for route in self.routes: if isinstance(route, web.RouteDef): api_routes.route(route.method, "/api" + route.path)(route.handler, **route.kwargs) self.app.add_routes(api_routes) self.app.add_routes(self.routes) for name, dir in nodes.EXTENSION_WEB_DIRS.items(): self.app.add_routes([ web.static('/extensions/' + urllib.parse.quote(name), dir), ]) self.app.add_routes([ web.static('/', self.web_root), ]) def get_queue_info(self): prompt_info = {} exec_info = {} exec_info['queue_remaining'] = self.prompt_queue.get_tasks_remaining() prompt_info['exec_info'] = exec_info return prompt_info async def send(self, event, data, sid=None): if event == BinaryEventTypes.UNENCODED_PREVIEW_IMAGE: await self.send_image(data, sid=sid) elif isinstance(data, (bytes, bytearray)): await self.send_bytes(event, data, sid) else: await self.send_json(event, data, sid) def encode_bytes(self, event, data): if not isinstance(event, int): raise RuntimeError(f"Binary event types must be integers, got {event}") packed = struct.pack(">I", event) message = bytearray(packed) message.extend(data) return message async def send_image(self, image_data, sid=None): image_type = image_data[0] image = image_data[1] max_size = image_data[2] if max_size is not None: if hasattr(Image, 'Resampling'): resampling = Image.Resampling.BILINEAR else: resampling = Image.ANTIALIAS image = ImageOps.contain(image, (max_size, max_size), resampling) type_num = 1 if image_type == "JPEG": type_num = 1 elif image_type == "PNG": type_num = 2 bytesIO = BytesIO() header = struct.pack(">I", type_num) bytesIO.write(header) image.save(bytesIO, format=image_type, quality=95, compress_level=1) preview_bytes = bytesIO.getvalue() await self.send_bytes(BinaryEventTypes.PREVIEW_IMAGE, preview_bytes, sid=sid) async def send_bytes(self, event, data, sid=None): message = self.encode_bytes(event, data) if sid is None: sockets = list(self.sockets.values()) for ws in sockets: await send_socket_catch_exception(ws.send_bytes, message) elif sid in self.sockets: 
            await send_socket_catch_exception(self.sockets[sid].send_bytes, message)

    async def send_json(self, event, data, sid=None):
        message = {"type": event, "data": data}

        if sid is None:
            sockets = list(self.sockets.values())
            for ws in sockets:
                await send_socket_catch_exception(ws.send_json, message)
        elif sid in self.sockets:
            await send_socket_catch_exception(self.sockets[sid].send_json, message)

    def send_sync(self, event, data, sid=None):
        self.loop.call_soon_threadsafe(
            self.messages.put_nowait, (event, data, sid))

    def queue_updated(self):
        self.send_sync("status", {"status": self.get_queue_info()})

    async def publish_loop(self):
        while True:
            msg = await self.messages.get()
            await self.send(*msg)

    async def start(self, address, port, verbose=True, call_on_start=None):
        runner = web.AppRunner(self.app, access_log=None)
        await runner.setup()
        ssl_ctx = None
        scheme = "http"
        if args.tls_keyfile and args.tls_certfile:
            ssl_ctx = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS_SERVER, verify_mode=ssl.CERT_NONE)
            ssl_ctx.load_cert_chain(certfile=args.tls_certfile, keyfile=args.tls_keyfile)
            scheme = "https"

        site = web.TCPSite(runner, address, port, ssl_context=ssl_ctx)
        await site.start()

        if verbose:
            logging.info("Starting server\n")
            logging.info("To see the GUI go to: {}://{}:{}".format(scheme, address, port))
        if call_on_start is not None:
            call_on_start(scheme, address, port)

    def add_on_prompt_handler(self, handler):
        self.on_prompt_handlers.append(handler)

    def trigger_on_prompt(self, json_data):
        for handler in self.on_prompt_handlers:
            try:
                json_data = handler(json_data)
            except Exception as e:
                logging.warning(f"[ERROR] An error occurred during the on_prompt_handler processing")
                logging.warning(traceback.format_exc())

        return json_data
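The binary frames produced by `encode_bytes` and `send_image` above imply a simple layout: a 4-byte big-endian event type, and for preview images another 4-byte big-endian image type (1 = JPEG, 2 = PNG) followed by the encoded image bytes. The client-side parser below is a sketch derived from that code; the websocket client that would receive the frame and the output filename are assumptions, not part of the server.

```python
import struct

PREVIEW_IMAGE = 1  # BinaryEventTypes.PREVIEW_IMAGE

def parse_binary_message(message: bytes):
    """Split a binary websocket frame produced by PromptServer.send_bytes."""
    event, = struct.unpack_from(">I", message, 0)
    payload = message[4:]
    if event == PREVIEW_IMAGE:
        image_type, = struct.unpack_from(">I", payload, 0)
        ext = {1: "jpeg", 2: "png"}.get(image_type, "bin")
        return event, ext, payload[4:]
    return event, None, payload

# Hypothetical usage with a frame received from GET /ws:
# event, ext, data = parse_binary_message(frame)
# if ext:
#     open(f"preview.{ext}", "wb").write(data)
```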
import pygit2 from datetime import datetime import sys import os import shutil import filecmp def pull(repo, remote_name='origin', branch='master'): for remote in repo.remotes: if remote.name == remote_name: remote.fetch() remote_master_id = repo.lookup_reference('refs/remotes/origin/%s' % (branch)).target merge_result, _ = repo.merge_analysis(remote_master_id) if merge_result & pygit2.GIT_MERGE_ANALYSIS_UP_TO_DATE: return elif merge_result & pygit2.GIT_MERGE_ANALYSIS_FASTFORWARD: repo.checkout_tree(repo.get(remote_master_id)) try: master_ref = repo.lookup_reference('refs/heads/%s' % (branch)) master_ref.set_target(remote_master_id) except KeyError: repo.create_branch(branch, repo.get(remote_master_id)) repo.head.set_target(remote_master_id) elif merge_result & pygit2.GIT_MERGE_ANALYSIS_NORMAL: repo.merge(remote_master_id) if repo.index.conflicts is not None: for conflict in repo.index.conflicts: print('Conflicts found in:', conflict[0].path) raise AssertionError('Conflicts, ahhhhh!!') user = repo.default_signature tree = repo.index.write_tree() commit = repo.create_commit('HEAD', user, user, 'Merge!', tree, [repo.head.target, remote_master_id]) repo.state_cleanup() else: raise AssertionError('Unknown merge analysis result') pygit2.option(pygit2.GIT_OPT_SET_OWNER_VALIDATION, 0) repo_path = str(sys.argv[1]) repo = pygit2.Repository(repo_path) ident = pygit2.Signature('comfyui', 'comfy@ui') try: print("stashing current changes") repo.stash(ident) except KeyError: print("nothing to stash") backup_branch_name = 'backup_branch_{}'.format(datetime.today().strftime('%Y-%m-%d_%H_%M_%S')) print("creating backup branch: {}".format(backup_branch_name)) try: repo.branches.local.create(backup_branch_name, repo.head.peel()) except: pass print("checking out master branch") branch = repo.lookup_branch('master') ref = repo.lookup_reference(branch.name) repo.checkout(ref) print("pulling latest changes") pull(repo) print("Done!") self_update = True if len(sys.argv) > 2: self_update = '--skip_self_update' not in sys.argv update_py_path = os.path.realpath(__file__) repo_update_py_path = os.path.join(repo_path, ".ci/update_windows/update.py") cur_path = os.path.dirname(update_py_path) req_path = os.path.join(cur_path, "current_requirements.txt") repo_req_path = os.path.join(repo_path, "requirements.txt") def files_equal(file1, file2): try: return filecmp.cmp(file1, file2, shallow=False) except: return False def file_size(f): try: return os.path.getsize(f) except: return 0 if self_update and not files_equal(update_py_path, repo_update_py_path) and file_size(repo_update_py_path) > 10: shutil.copy(repo_update_py_path, os.path.join(cur_path, "update_new.py")) exit() if not os.path.exists(req_path) or not files_equal(repo_req_path, req_path): import subprocess try: subprocess.check_call([sys.executable, '-s', '-m', 'pip', 'install', '-r', repo_req_path]) shutil.copy(repo_req_path, req_path) except: pass
name: Tests CI
on: [push, pull_request]
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v3
        with:
          node-version: 18
      - uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Install requirements
        run: |
          python -m pip install --upgrade pip
          pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
          pip install -r requirements.txt
      - name: Run Tests
        run: |
          npm ci
          npm run test:generate
          npm test -- --verbose
        working-directory: ./tests-ui
import os import json from aiohttp import web class AppSettings(): def __init__(self, user_manager): self.user_manager = user_manager def get_settings(self, request): file = self.user_manager.get_request_user_filepath( request, "comfy.settings.json") if os.path.isfile(file): with open(file) as f: return json.load(f) else: return {} def save_settings(self, request, settings): file = self.user_manager.get_request_user_filepath( request, "comfy.settings.json") with open(file, "w") as f: f.write(json.dumps(settings, indent=4)) def add_routes(self, routes): @routes.get("/settings") async def get_settings(request): return web.json_response(self.get_settings(request)) @routes.get("/settings/{id}") async def get_setting(request): value = None settings = self.get_settings(request) setting_id = request.match_info.get("id", None) if setting_id and setting_id in settings: value = settings[setting_id] return web.json_response(value) @routes.post("/settings") async def post_settings(request): settings = self.get_settings(request) new_settings = await request.json() self.save_settings(request, {**settings, **new_settings}) return web.Response(status=200) @routes.post("/settings/{id}") async def post_setting(request): setting_id = request.match_info.get("id", None) if not setting_id: return web.Response(status=400) settings = self.get_settings(request) settings[setting_id] = await request.json() self.save_settings(request, settings) return web.Response(status=200)
import json import os import re import uuid from aiohttp import web from comfy.cli_args import args from folder_paths import user_directory from .app_settings import AppSettings default_user = "default" users_file = os.path.join(user_directory, "users.json") class UserManager(): def __init__(self): global user_directory self.settings = AppSettings(self) if not os.path.exists(user_directory): os.mkdir(user_directory) if not args.multi_user: print("****** User settings have been changed to be stored on the server instead of browser storage. ******") print("****** For multi-user setups add the --multi-user CLI argument to enable multiple user profiles. ******") if args.multi_user: if os.path.isfile(users_file): with open(users_file) as f: self.users = json.load(f) else: self.users = {} else: self.users = {"default": "default"} def get_request_user_id(self, request): user = "default" if args.multi_user and "comfy-user" in request.headers: user = request.headers["comfy-user"] if user not in self.users: raise KeyError("Unknown user: " + user) return user def get_request_user_filepath(self, request, file, type="userdata", create_dir=True): global user_directory if type == "userdata": root_dir = user_directory else: raise KeyError("Unknown filepath type:" + type) user = self.get_request_user_id(request) path = user_root = os.path.abspath(os.path.join(root_dir, user)) if os.path.commonpath((root_dir, user_root)) != root_dir: return None parent = user_root if file is not None: path = os.path.abspath(os.path.join(user_root, file)) if os.path.commonpath((user_root, path)) != user_root: return None if create_dir and not os.path.exists(parent): os.mkdir(parent) return path def add_user(self, name): name = name.strip() if not name: raise ValueError("username not provided") user_id = re.sub("[^a-zA-Z0-9-_]+", '-', name) user_id = user_id + "_" + str(uuid.uuid4()) self.users[user_id] = name global users_file with open(users_file, "w") as f: json.dump(self.users, f) return user_id def add_routes(self, routes): self.settings.add_routes(routes) @routes.get("/users") async def get_users(request): if args.multi_user: return web.json_response({"storage": "server", "users": self.users}) else: user_dir = self.get_request_user_filepath(request, None, create_dir=False) return web.json_response({ "storage": "server", "migrated": os.path.exists(user_dir) }) @routes.post("/users") async def post_users(request): body = await request.json() username = body["username"] if username in self.users.values(): return web.json_response({"error": "Duplicate username."}, status=400) user_id = self.add_user(username) return web.json_response(user_id) @routes.get("/userdata/{file}") async def getuserdata(request): file = request.match_info.get("file", None) if not file: return web.Response(status=400) path = self.get_request_user_filepath(request, file) if not path: return web.Response(status=403) if not os.path.exists(path): return web.Response(status=404) return web.FileResponse(path) @routes.post("/userdata/{file}") async def post_userdata(request): file = request.match_info.get("file", None) if not file: return web.Response(status=400) path = self.get_request_user_filepath(request, file) if not path: return web.Response(status=403) body = await request.read() with open(path, "wb") as f: f.write(body) return web.Response(status=200)
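As a hedged usage sketch of the settings and user routes defined above: it assumes a local instance on the default port 8188 with `--multi-user` enabled, uses the third-party `requests` library purely as the example's own choice, and the setting key shown is illustrative rather than a documented frontend setting.

```python
import requests

BASE = "http://127.0.0.1:8188"   # assumed local instance on the default port

# With --multi-user enabled, create a profile first and use the returned id
# in the "comfy-user" header; without --multi-user the header can be omitted.
user_id = requests.post(f"{BASE}/users", json={"username": "alice"}).json()
headers = {"comfy-user": user_id}

# Read all settings for this user (stored server-side in comfy.settings.json).
settings = requests.get(f"{BASE}/settings", headers=headers).json()

# Merge a new key into the stored settings; the key name here is illustrative.
requests.post(f"{BASE}/settings", headers=headers, json={"Comfy.UseNewMenu": True})

# Read a single setting back.
print(requests.get(f"{BASE}/settings/Comfy.UseNewMenu", headers=headers).json())
```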
import pickle

load = pickle.load

class Empty:
    pass

class Unpickler(pickle.Unpickler):
    def find_class(self, module, name):
        # Stub out pytorch_lightning classes referenced in old checkpoints so
        # they can be unpickled without pytorch_lightning being installed.
        if module.startswith("pytorch_lightning"):
            return Empty
        return super().find_class(module, name)
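A minimal sketch of how this restricted unpickler can be handed to `torch.load` via its `pickle_module` argument, which is how loaders in this codebase consume it; the checkpoint path is hypothetical.

```python
import torch
import comfy.checkpoint_pickle

# Hypothetical legacy Lightning checkpoint: any pytorch_lightning.* classes
# referenced inside the pickle are replaced by the inert Empty class above.
ckpt = torch.load("models/checkpoints/legacy.ckpt",
                  map_location="cpu",
                  pickle_module=comfy.checkpoint_pickle)
state_dict = ckpt.get("state_dict", ckpt)
print(len(state_dict), "tensors loaded")
```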
{ "architectures": [ "CLIPTextModel" ], "attention_dropout": 0.0, "bos_token_id": 0, "dropout": 0.0, "eos_token_id": 2, "hidden_act": "gelu", "hidden_size": 1280, "initializer_factor": 1.0, "initializer_range": 0.02, "intermediate_size": 5120, "layer_norm_eps": 1e-05, "max_position_embeddings": 77, "model_type": "clip_text_model", "num_attention_heads": 20, "num_hidden_layers": 32, "pad_token_id": 1, "projection_dim": 1280, "torch_dtype": "float32", "vocab_size": 49408 }
import torch from comfy.ldm.modules.attention import optimized_attention_for_device class CLIPAttention(torch.nn.Module): def __init__(self, embed_dim, heads, dtype, device, operations): super().__init__() self.heads = heads self.q_proj = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device) self.k_proj = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device) self.v_proj = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device) self.out_proj = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device) def forward(self, x, mask=None, optimized_attention=None): q = self.q_proj(x) k = self.k_proj(x) v = self.v_proj(x) out = optimized_attention(q, k, v, self.heads, mask) return self.out_proj(out) ACTIVATIONS = {"quick_gelu": lambda a: a * torch.sigmoid(1.702 * a), "gelu": torch.nn.functional.gelu, } class CLIPMLP(torch.nn.Module): def __init__(self, embed_dim, intermediate_size, activation, dtype, device, operations): super().__init__() self.fc1 = operations.Linear(embed_dim, intermediate_size, bias=True, dtype=dtype, device=device) self.activation = ACTIVATIONS[activation] self.fc2 = operations.Linear(intermediate_size, embed_dim, bias=True, dtype=dtype, device=device) def forward(self, x): x = self.fc1(x) x = self.activation(x) x = self.fc2(x) return x class CLIPLayer(torch.nn.Module): def __init__(self, embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations): super().__init__() self.layer_norm1 = operations.LayerNorm(embed_dim, dtype=dtype, device=device) self.self_attn = CLIPAttention(embed_dim, heads, dtype, device, operations) self.layer_norm2 = operations.LayerNorm(embed_dim, dtype=dtype, device=device) self.mlp = CLIPMLP(embed_dim, intermediate_size, intermediate_activation, dtype, device, operations) def forward(self, x, mask=None, optimized_attention=None): x += self.self_attn(self.layer_norm1(x), mask, optimized_attention) x += self.mlp(self.layer_norm2(x)) return x class CLIPEncoder(torch.nn.Module): def __init__(self, num_layers, embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations): super().__init__() self.layers = torch.nn.ModuleList([CLIPLayer(embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations) for i in range(num_layers)]) def forward(self, x, mask=None, intermediate_output=None): optimized_attention = optimized_attention_for_device(x.device, mask=mask is not None, small_input=True) if intermediate_output is not None: if intermediate_output < 0: intermediate_output = len(self.layers) + intermediate_output intermediate = None for i, l in enumerate(self.layers): x = l(x, mask, optimized_attention) if i == intermediate_output: intermediate = x.clone() return x, intermediate class CLIPEmbeddings(torch.nn.Module): def __init__(self, embed_dim, vocab_size=49408, num_positions=77, dtype=None, device=None): super().__init__() self.token_embedding = torch.nn.Embedding(vocab_size, embed_dim, dtype=dtype, device=device) self.position_embedding = torch.nn.Embedding(num_positions, embed_dim, dtype=dtype, device=device) def forward(self, input_tokens): return self.token_embedding(input_tokens) + self.position_embedding.weight class CLIPTextModel_(torch.nn.Module): def __init__(self, config_dict, dtype, device, operations): num_layers = config_dict["num_hidden_layers"] embed_dim = config_dict["hidden_size"] heads = config_dict["num_attention_heads"] intermediate_size = 
config_dict["intermediate_size"] intermediate_activation = config_dict["hidden_act"] super().__init__() self.embeddings = CLIPEmbeddings(embed_dim, dtype=torch.float32, device=device) self.encoder = CLIPEncoder(num_layers, embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations) self.final_layer_norm = operations.LayerNorm(embed_dim, dtype=dtype, device=device) def forward(self, input_tokens, attention_mask=None, intermediate_output=None, final_layer_norm_intermediate=True): x = self.embeddings(input_tokens) mask = None if attention_mask is not None: mask = 1.0 - attention_mask.to(x.dtype).reshape((attention_mask.shape[0], 1, -1, attention_mask.shape[-1])).expand(attention_mask.shape[0], 1, attention_mask.shape[-1], attention_mask.shape[-1]) mask = mask.masked_fill(mask.to(torch.bool), float("-inf")) causal_mask = torch.empty(x.shape[1], x.shape[1], dtype=x.dtype, device=x.device).fill_(float("-inf")).triu_(1) if mask is not None: mask += causal_mask else: mask = causal_mask x, i = self.encoder(x, mask=mask, intermediate_output=intermediate_output) x = self.final_layer_norm(x) if i is not None and final_layer_norm_intermediate: i = self.final_layer_norm(i) pooled_output = x[torch.arange(x.shape[0], device=x.device), input_tokens.to(dtype=torch.int, device=x.device).argmax(dim=-1),] return x, i, pooled_output class CLIPTextModel(torch.nn.Module): def __init__(self, config_dict, dtype, device, operations): super().__init__() self.num_layers = config_dict["num_hidden_layers"] self.text_model = CLIPTextModel_(config_dict, dtype, device, operations) embed_dim = config_dict["hidden_size"] self.text_projection = operations.Linear(embed_dim, embed_dim, bias=False, dtype=dtype, device=device) self.text_projection.weight.copy_(torch.eye(embed_dim)) self.dtype = dtype def get_input_embeddings(self): return self.text_model.embeddings.token_embedding def set_input_embeddings(self, embeddings): self.text_model.embeddings.token_embedding = embeddings def forward(self, *args, **kwargs): x = self.text_model(*args, **kwargs) out = self.text_projection(x[2]) return (x[0], x[1], out, x[2]) class CLIPVisionEmbeddings(torch.nn.Module): def __init__(self, embed_dim, num_channels=3, patch_size=14, image_size=224, dtype=None, device=None, operations=None): super().__init__() self.class_embedding = torch.nn.Parameter(torch.empty(embed_dim, dtype=dtype, device=device)) self.patch_embedding = operations.Conv2d( in_channels=num_channels, out_channels=embed_dim, kernel_size=patch_size, stride=patch_size, bias=False, dtype=dtype, device=device ) num_patches = (image_size num_positions = num_patches + 1 self.position_embedding = torch.nn.Embedding(num_positions, embed_dim, dtype=dtype, device=device) def forward(self, pixel_values): embeds = self.patch_embedding(pixel_values).flatten(2).transpose(1, 2) return torch.cat([self.class_embedding.to(embeds.device).expand(pixel_values.shape[0], 1, -1), embeds], dim=1) + self.position_embedding.weight.to(embeds.device) class CLIPVision(torch.nn.Module): def __init__(self, config_dict, dtype, device, operations): super().__init__() num_layers = config_dict["num_hidden_layers"] embed_dim = config_dict["hidden_size"] heads = config_dict["num_attention_heads"] intermediate_size = config_dict["intermediate_size"] intermediate_activation = config_dict["hidden_act"] self.embeddings = CLIPVisionEmbeddings(embed_dim, config_dict["num_channels"], config_dict["patch_size"], config_dict["image_size"], dtype=torch.float32, device=device, operations=operations) 
self.pre_layrnorm = operations.LayerNorm(embed_dim) self.encoder = CLIPEncoder(num_layers, embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations) self.post_layernorm = operations.LayerNorm(embed_dim) def forward(self, pixel_values, attention_mask=None, intermediate_output=None): x = self.embeddings(pixel_values) x = self.pre_layrnorm(x) x, i = self.encoder(x, mask=None, intermediate_output=intermediate_output) pooled_output = self.post_layernorm(x[:, 0, :]) return x, i, pooled_output class CLIPVisionModelProjection(torch.nn.Module): def __init__(self, config_dict, dtype, device, operations): super().__init__() self.vision_model = CLIPVision(config_dict, dtype, device, operations) self.visual_projection = operations.Linear(config_dict["hidden_size"], config_dict["projection_dim"], bias=False) def forward(self, *args, **kwargs): x = self.vision_model(*args, **kwargs) out = self.visual_projection(x[2]) return (x[0], x[1], out)
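As a hedged usage sketch of the text-model classes above: it builds a `CLIPTextModel` from the 1280-wide, 32-layer text config JSON shown earlier and runs dummy token ids through it to show the returned shapes. The config filename, the choice of `comfy.ops.manual_cast`, and running on CPU are this example's assumptions.

```python
import json
import torch
import comfy.ops
import comfy.clip_model

# Assumed filename for the text-model config JSON shown above.
with open("clip_config_bigg.json") as f:
    config = json.load(f)

model = comfy.clip_model.CLIPTextModel(config, dtype=torch.float32,
                                       device="cpu", operations=comfy.ops.manual_cast)

tokens = torch.zeros((1, 77), dtype=torch.long)   # dummy ids, 77 = max_position_embeddings
hidden, intermediate, projected, pooled = model(tokens, intermediate_output=-2)
print(hidden.shape, projected.shape)              # (1, 77, 1280), (1, 1280)
```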
from .utils import load_torch_file, transformers_convert, state_dict_prefix_replace import os import torch import json import logging import comfy.ops import comfy.model_patcher import comfy.model_management import comfy.utils import comfy.clip_model class Output: def __getitem__(self, key): return getattr(self, key) def __setitem__(self, key, item): setattr(self, key, item) def clip_preprocess(image, size=224): mean = torch.tensor([ 0.48145466,0.4578275,0.40821073], device=image.device, dtype=image.dtype) std = torch.tensor([0.26862954,0.26130258,0.27577711], device=image.device, dtype=image.dtype) image = image.movedim(-1, 1) if not (image.shape[2] == size and image.shape[3] == size): scale = (size / min(image.shape[2], image.shape[3])) image = torch.nn.functional.interpolate(image, size=(round(scale * image.shape[2]), round(scale * image.shape[3])), mode="bicubic", antialias=True) h = (image.shape[2] - size) w = (image.shape[3] - size) image = image[:,:,h:h+size,w:w+size] image = torch.clip((255. * image), 0, 255).round() / 255.0 return (image - mean.view([3,1,1])) / std.view([3,1,1]) class ClipVisionModel(): def __init__(self, json_config): with open(json_config) as f: config = json.load(f) self.load_device = comfy.model_management.text_encoder_device() offload_device = comfy.model_management.text_encoder_offload_device() self.dtype = comfy.model_management.text_encoder_dtype(self.load_device) self.model = comfy.clip_model.CLIPVisionModelProjection(config, self.dtype, offload_device, comfy.ops.manual_cast) self.model.eval() self.patcher = comfy.model_patcher.ModelPatcher(self.model, load_device=self.load_device, offload_device=offload_device) def load_sd(self, sd): return self.model.load_state_dict(sd, strict=False) def get_sd(self): return self.model.state_dict() def encode_image(self, image): comfy.model_management.load_model_gpu(self.patcher) pixel_values = clip_preprocess(image.to(self.load_device)).float() out = self.model(pixel_values=pixel_values, intermediate_output=-2) outputs = Output() outputs["last_hidden_state"] = out[0].to(comfy.model_management.intermediate_device()) outputs["image_embeds"] = out[2].to(comfy.model_management.intermediate_device()) outputs["penultimate_hidden_states"] = out[1].to(comfy.model_management.intermediate_device()) return outputs def convert_to_transformers(sd, prefix): sd_k = sd.keys() if "{}transformer.resblocks.0.attn.in_proj_weight".format(prefix) in sd_k: keys_to_replace = { "{}class_embedding".format(prefix): "vision_model.embeddings.class_embedding", "{}conv1.weight".format(prefix): "vision_model.embeddings.patch_embedding.weight", "{}positional_embedding".format(prefix): "vision_model.embeddings.position_embedding.weight", "{}ln_post.bias".format(prefix): "vision_model.post_layernorm.bias", "{}ln_post.weight".format(prefix): "vision_model.post_layernorm.weight", "{}ln_pre.bias".format(prefix): "vision_model.pre_layrnorm.bias", "{}ln_pre.weight".format(prefix): "vision_model.pre_layrnorm.weight", } for x in keys_to_replace: if x in sd_k: sd[keys_to_replace[x]] = sd.pop(x) if "{}proj".format(prefix) in sd_k: sd['visual_projection.weight'] = sd.pop("{}proj".format(prefix)).transpose(0, 1) sd = transformers_convert(sd, prefix, "vision_model.", 48) else: replace_prefix = {prefix: ""} sd = state_dict_prefix_replace(sd, replace_prefix) return sd def load_clipvision_from_sd(sd, prefix="", convert_keys=False): if convert_keys: sd = convert_to_transformers(sd, prefix) if "vision_model.encoder.layers.47.layer_norm1.weight" in sd: json_config = 
os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_vision_config_g.json") elif "vision_model.encoder.layers.30.layer_norm1.weight" in sd: json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_vision_config_h.json") elif "vision_model.encoder.layers.22.layer_norm1.weight" in sd: json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_vision_config_vitl.json") else: return None clip = ClipVisionModel(json_config) m, u = clip.load_sd(sd) if len(m) > 0: logging.warning("missing clip vision: {}".format(m)) u = set(u) keys = list(sd.keys()) for k in keys: if k not in u: t = sd.pop(k) del t return clip def load(ckpt_path): sd = load_torch_file(ckpt_path) if "visual.transformer.resblocks.0.attn.in_proj_weight" in sd: return load_clipvision_from_sd(sd, prefix="visual.", convert_keys=True) else: return load_clipvision_from_sd(sd)
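A short usage sketch of the loader above: `load()` picks the ViT-L, ViT-H, or ViT-bigG config by the number of encoder layers in the state dict, and `encode_image` expects ComfyUI's `[batch, height, width, channels]` float images in 0..1. The checkpoint path here is hypothetical.

```python
import torch
import comfy.clip_vision

# Hypothetical path to any CLIP vision checkpoint the loader recognizes.
clip_vision = comfy.clip_vision.load("models/clip_vision/clip_vision_vit_l.safetensors")

image = torch.rand(1, 512, 512, 3)        # [B, H, W, C] in 0..1
out = clip_vision.encode_image(image)
print(out["image_embeds"].shape)           # e.g. (1, 768) for the ViT-L config
print(out["penultimate_hidden_states"].shape)
```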
{ "attention_dropout": 0.0, "dropout": 0.0, "hidden_act": "gelu", "hidden_size": 1664, "image_size": 224, "initializer_factor": 1.0, "initializer_range": 0.02, "intermediate_size": 8192, "layer_norm_eps": 1e-05, "model_type": "clip_vision_model", "num_attention_heads": 16, "num_channels": 3, "num_hidden_layers": 48, "patch_size": 14, "projection_dim": 1280, "torch_dtype": "float32" }
{ "attention_dropout": 0.0, "dropout": 0.0, "hidden_act": "gelu", "hidden_size": 1280, "image_size": 224, "initializer_factor": 1.0, "initializer_range": 0.02, "intermediate_size": 5120, "layer_norm_eps": 1e-05, "model_type": "clip_vision_model", "num_attention_heads": 16, "num_channels": 3, "num_hidden_layers": 32, "patch_size": 14, "projection_dim": 1024, "torch_dtype": "float32" }
{ "attention_dropout": 0.0, "dropout": 0.0, "hidden_act": "quick_gelu", "hidden_size": 1024, "image_size": 224, "initializer_factor": 1.0, "initializer_range": 0.02, "intermediate_size": 4096, "layer_norm_eps": 1e-05, "model_type": "clip_vision_model", "num_attention_heads": 16, "num_channels": 3, "num_hidden_layers": 24, "patch_size": 14, "projection_dim": 768, "torch_dtype": "float32" }
import argparse import enum import comfy.options class EnumAction(argparse.Action): """ Argparse action for handling Enums """ def __init__(self, **kwargs): enum_type = kwargs.pop("type", None) if enum_type is None: raise ValueError("type must be assigned an Enum when using EnumAction") if not issubclass(enum_type, enum.Enum): raise TypeError("type must be an Enum when using EnumAction") choices = tuple(e.value for e in enum_type) kwargs.setdefault("choices", choices) kwargs.setdefault("metavar", f"[{','.join(list(choices))}]") super(EnumAction, self).__init__(**kwargs) self._enum = enum_type def __call__(self, parser, namespace, values, option_string=None): value = self._enum(values) setattr(namespace, self.dest, value) parser = argparse.ArgumentParser() parser.add_argument("--listen", type=str, default="127.0.0.1", metavar="IP", nargs="?", const="0.0.0.0", help="Specify the IP address to listen on (default: 127.0.0.1). If --listen is provided without an argument, it defaults to 0.0.0.0. (listens on all)") parser.add_argument("--port", type=int, default=8188, help="Set the listen port.") parser.add_argument("--tls-keyfile", type=str, help="Path to TLS (SSL) key file. Enables TLS, makes app accessible at https: parser.add_argument("--tls-certfile", type=str, help="Path to TLS (SSL) certificate file. Enables TLS, makes app accessible at https: parser.add_argument("--enable-cors-header", type=str, default=None, metavar="ORIGIN", nargs="?", const="*", help="Enable CORS (Cross-Origin Resource Sharing) with optional origin or allow all with default '*'.") parser.add_argument("--max-upload-size", type=float, default=100, help="Set the maximum upload size in MB.") parser.add_argument("--extra-model-paths-config", type=str, default=None, metavar="PATH", nargs='+', action='append', help="Load one or more extra_model_paths.yaml files.") parser.add_argument("--output-directory", type=str, default=None, help="Set the ComfyUI output directory.") parser.add_argument("--temp-directory", type=str, default=None, help="Set the ComfyUI temp directory (default is in the ComfyUI directory).") parser.add_argument("--input-directory", type=str, default=None, help="Set the ComfyUI input directory.") parser.add_argument("--auto-launch", action="store_true", help="Automatically launch ComfyUI in the default browser.") parser.add_argument("--disable-auto-launch", action="store_true", help="Disable auto launching the browser.") parser.add_argument("--cuda-device", type=int, default=None, metavar="DEVICE_ID", help="Set the id of the cuda device this instance will use.") cm_group = parser.add_mutually_exclusive_group() cm_group.add_argument("--cuda-malloc", action="store_true", help="Enable cudaMallocAsync (enabled by default for torch 2.0 and up).") cm_group.add_argument("--disable-cuda-malloc", action="store_true", help="Disable cudaMallocAsync.") fp_group = parser.add_mutually_exclusive_group() fp_group.add_argument("--force-fp32", action="store_true", help="Force fp32 (If this makes your GPU work better please report it).") fp_group.add_argument("--force-fp16", action="store_true", help="Force fp16.") fpunet_group = parser.add_mutually_exclusive_group() fpunet_group.add_argument("--bf16-unet", action="store_true", help="Run the UNET in bf16. 
This should only be used for testing stuff.") fpunet_group.add_argument("--fp16-unet", action="store_true", help="Store unet weights in fp16.") fpunet_group.add_argument("--fp8_e4m3fn-unet", action="store_true", help="Store unet weights in fp8_e4m3fn.") fpunet_group.add_argument("--fp8_e5m2-unet", action="store_true", help="Store unet weights in fp8_e5m2.") fpvae_group = parser.add_mutually_exclusive_group() fpvae_group.add_argument("--fp16-vae", action="store_true", help="Run the VAE in fp16, might cause black images.") fpvae_group.add_argument("--fp32-vae", action="store_true", help="Run the VAE in full precision fp32.") fpvae_group.add_argument("--bf16-vae", action="store_true", help="Run the VAE in bf16.") parser.add_argument("--cpu-vae", action="store_true", help="Run the VAE on the CPU.") fpte_group = parser.add_mutually_exclusive_group() fpte_group.add_argument("--fp8_e4m3fn-text-enc", action="store_true", help="Store text encoder weights in fp8 (e4m3fn variant).") fpte_group.add_argument("--fp8_e5m2-text-enc", action="store_true", help="Store text encoder weights in fp8 (e5m2 variant).") fpte_group.add_argument("--fp16-text-enc", action="store_true", help="Store text encoder weights in fp16.") fpte_group.add_argument("--fp32-text-enc", action="store_true", help="Store text encoder weights in fp32.") parser.add_argument("--force-channels-last", action="store_true", help="Force channels last format when inferencing the models.") parser.add_argument("--directml", type=int, nargs="?", metavar="DIRECTML_DEVICE", const=-1, help="Use torch-directml.") parser.add_argument("--disable-ipex-optimize", action="store_true", help="Disables ipex.optimize when loading models with Intel GPUs.") class LatentPreviewMethod(enum.Enum): NoPreviews = "none" Auto = "auto" Latent2RGB = "latent2rgb" TAESD = "taesd" parser.add_argument("--preview-method", type=LatentPreviewMethod, default=LatentPreviewMethod.NoPreviews, help="Default preview method for sampler nodes.", action=EnumAction) attn_group = parser.add_mutually_exclusive_group() attn_group.add_argument("--use-split-cross-attention", action="store_true", help="Use the split cross attention optimization. Ignored when xformers is used.") attn_group.add_argument("--use-quad-cross-attention", action="store_true", help="Use the sub-quadratic cross attention optimization . Ignored when xformers is used.") attn_group.add_argument("--use-pytorch-cross-attention", action="store_true", help="Use the new pytorch 2.0 cross attention function.") parser.add_argument("--disable-xformers", action="store_true", help="Disable xformers.") upcast = parser.add_mutually_exclusive_group() upcast.add_argument("--force-upcast-attention", action="store_true", help="Force enable attention upcasting, please report if it fixes black images.") upcast.add_argument("--dont-upcast-attention", action="store_true", help="Disable all upcasting of attention. Should be unnecessary except for debugging.") vram_group = parser.add_mutually_exclusive_group() vram_group.add_argument("--gpu-only", action="store_true", help="Store and run everything (text encoders/CLIP models, etc... on the GPU).") vram_group.add_argument("--highvram", action="store_true", help="By default models will be unloaded to CPU memory after being used. 
This option keeps them in GPU memory.") vram_group.add_argument("--normalvram", action="store_true", help="Used to force normal vram use if lowvram gets automatically enabled.") vram_group.add_argument("--lowvram", action="store_true", help="Split the unet in parts to use less vram.") vram_group.add_argument("--novram", action="store_true", help="When lowvram isn't enough.") vram_group.add_argument("--cpu", action="store_true", help="To use the CPU for everything (slow).") parser.add_argument("--disable-smart-memory", action="store_true", help="Force ComfyUI to agressively offload to regular ram instead of keeping models in vram when it can.") parser.add_argument("--deterministic", action="store_true", help="Make pytorch use slower deterministic algorithms when it can. Note that this might not make images deterministic in all cases.") parser.add_argument("--dont-print-server", action="store_true", help="Don't print server output.") parser.add_argument("--quick-test-for-ci", action="store_true", help="Quick test for CI.") parser.add_argument("--windows-standalone-build", action="store_true", help="Windows standalone build: Enable convenient things that most people using the standalone windows build will probably enjoy (like auto opening the page on startup).") parser.add_argument("--disable-metadata", action="store_true", help="Disable saving prompt metadata in files.") parser.add_argument("--multi-user", action="store_true", help="Enables per-user storage.") parser.add_argument("--verbose", action="store_true", help="Enables more debug prints.") if comfy.options.args_parsing: args = parser.parse_args() else: args = parser.parse_args([]) if args.windows_standalone_build: args.auto_launch = True if args.disable_auto_launch: args.auto_launch = False import logging logging_level = logging.INFO if args.verbose: logging_level = logging.DEBUG logging.basicConfig(format="%(message)s", level=logging_level)
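A small standalone illustration of `EnumAction` from the argument parser above, mirroring how `--preview-method` is declared. The `Color` enum and `--color` flag are made up for the demo; it only assumes `comfy.cli_args` is importable.

```python
import argparse
import enum

from comfy.cli_args import EnumAction

class Color(enum.Enum):
    RED = "red"
    GREEN = "green"

demo = argparse.ArgumentParser()
# EnumAction pops "type", derives choices from the enum values, and converts
# the parsed string back into the enum member.
demo.add_argument("--color", type=Color, default=Color.RED,
                  action=EnumAction, help="Pick a color.")
print(demo.parse_args(["--color", "green"]).color)   # Color.GREEN
```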
import torch
import math
import comfy.utils


def lcm(a, b):
    return abs(a * b) // math.gcd(a, b)

class CONDRegular:
    def __init__(self, cond):
        self.cond = cond

    def _copy_with(self, cond):
        return self.__class__(cond)

    def process_cond(self, batch_size, device, **kwargs):
        return self._copy_with(comfy.utils.repeat_to_batch_size(self.cond, batch_size).to(device))

    def can_concat(self, other):
        if self.cond.shape != other.cond.shape:
            return False
        return True

    def concat(self, others):
        conds = [self.cond]
        for x in others:
            conds.append(x.cond)
        return torch.cat(conds)

class CONDNoiseShape(CONDRegular):
    def process_cond(self, batch_size, device, area, **kwargs):
        data = self.cond
        if area is not None:
            # area stores offsets in the second half and sizes in the first half
            dims = len(area) // 2
            for i in range(dims):
                data = data.narrow(i + 2, area[i + dims], area[i])

        return self._copy_with(comfy.utils.repeat_to_batch_size(data, batch_size).to(device))


class CONDCrossAttn(CONDRegular):
    def can_concat(self, other):
        s1 = self.cond.shape
        s2 = other.cond.shape
        if s1 != s2:
            if s1[0] != s2[0] or s1[2] != s2[2]:
                return False

            mult_min = lcm(s1[1], s2[1])
            diff = mult_min // min(s1[1], s2[1])
            if diff > 4:  # don't pad by more than 4x, it would hurt performance
                return False
        return True

    def concat(self, others):
        conds = [self.cond]
        crossattn_max_len = self.cond.shape[1]
        for x in others:
            c = x.cond
            crossattn_max_len = lcm(crossattn_max_len, c.shape[1])
            conds.append(c)

        out = []
        for c in conds:
            if c.shape[1] < crossattn_max_len:
                # padding by repeating the tokens doesn't change the cross-attention result
                c = c.repeat(1, crossattn_max_len // c.shape[1], 1)
            out.append(c)
        return torch.cat(out)

class CONDConstant(CONDRegular):
    def __init__(self, cond):
        self.cond = cond

    def process_cond(self, batch_size, device, **kwargs):
        return self._copy_with(self.cond)

    def can_concat(self, other):
        if self.cond != other.cond:
            return False
        return True

    def concat(self, others):
        return self.cond
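A small worked illustration of the padding rule in `CONDCrossAttn.concat`, assuming the classes above are in scope; the tensor sizes are arbitrary and chosen only to show the lcm-based repeat.

```python
import torch

# Two cross-attention conds with 77 and 154 tokens.
a = CONDCrossAttn(torch.randn(1, 77, 768))
b = CONDCrossAttn(torch.randn(1, 154, 768))

print(a.can_concat(b))      # True: lcm(77, 154) = 154, padding factor 2 <= 4
print(a.concat([b]).shape)  # torch.Size([2, 154, 768]); the 77-token cond is repeated twice
```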
import torch import math import os import logging import comfy.utils import comfy.model_management import comfy.model_detection import comfy.model_patcher import comfy.ops import comfy.cldm.cldm import comfy.t2i_adapter.adapter import comfy.ldm.cascade.controlnet def broadcast_image_to(tensor, target_batch_size, batched_number): current_batch_size = tensor.shape[0] if current_batch_size == 1: return tensor per_batch = target_batch_size tensor = tensor[:per_batch] if per_batch > tensor.shape[0]: tensor = torch.cat([tensor] * (per_batch current_batch_size = tensor.shape[0] if current_batch_size == target_batch_size: return tensor else: return torch.cat([tensor] * batched_number, dim=0) class ControlBase: def __init__(self, device=None): self.cond_hint_original = None self.cond_hint = None self.strength = 1.0 self.timestep_percent_range = (0.0, 1.0) self.global_average_pooling = False self.timestep_range = None self.compression_ratio = 8 self.upscale_algorithm = 'nearest-exact' if device is None: device = comfy.model_management.get_torch_device() self.device = device self.previous_controlnet = None def set_cond_hint(self, cond_hint, strength=1.0, timestep_percent_range=(0.0, 1.0)): self.cond_hint_original = cond_hint self.strength = strength self.timestep_percent_range = timestep_percent_range return self def pre_run(self, model, percent_to_timestep_function): self.timestep_range = (percent_to_timestep_function(self.timestep_percent_range[0]), percent_to_timestep_function(self.timestep_percent_range[1])) if self.previous_controlnet is not None: self.previous_controlnet.pre_run(model, percent_to_timestep_function) def set_previous_controlnet(self, controlnet): self.previous_controlnet = controlnet return self def cleanup(self): if self.previous_controlnet is not None: self.previous_controlnet.cleanup() if self.cond_hint is not None: del self.cond_hint self.cond_hint = None self.timestep_range = None def get_models(self): out = [] if self.previous_controlnet is not None: out += self.previous_controlnet.get_models() return out def copy_to(self, c): c.cond_hint_original = self.cond_hint_original c.strength = self.strength c.timestep_percent_range = self.timestep_percent_range c.global_average_pooling = self.global_average_pooling c.compression_ratio = self.compression_ratio c.upscale_algorithm = self.upscale_algorithm def inference_memory_requirements(self, dtype): if self.previous_controlnet is not None: return self.previous_controlnet.inference_memory_requirements(dtype) return 0 def control_merge(self, control_input, control_output, control_prev, output_dtype): out = {'input':[], 'middle':[], 'output': []} if control_input is not None: for i in range(len(control_input)): key = 'input' x = control_input[i] if x is not None: x *= self.strength if x.dtype != output_dtype: x = x.to(output_dtype) out[key].insert(0, x) if control_output is not None: for i in range(len(control_output)): if i == (len(control_output) - 1): key = 'middle' index = 0 else: key = 'output' index = i x = control_output[i] if x is not None: if self.global_average_pooling: x = torch.mean(x, dim=(2, 3), keepdim=True).repeat(1, 1, x.shape[2], x.shape[3]) x *= self.strength if x.dtype != output_dtype: x = x.to(output_dtype) out[key].append(x) if control_prev is not None: for x in ['input', 'middle', 'output']: o = out[x] for i in range(len(control_prev[x])): prev_val = control_prev[x][i] if i >= len(o): o.append(prev_val) elif prev_val is not None: if o[i] is None: o[i] = prev_val else: if o[i].shape[0] < prev_val.shape[0]: o[i] 
= prev_val + o[i] else: o[i] += prev_val return out class ControlNet(ControlBase): def __init__(self, control_model=None, global_average_pooling=False, device=None, load_device=None, manual_cast_dtype=None): super().__init__(device) self.control_model = control_model self.load_device = load_device if control_model is not None: self.control_model_wrapped = comfy.model_patcher.ModelPatcher(self.control_model, load_device=load_device, offload_device=comfy.model_management.unet_offload_device()) self.global_average_pooling = global_average_pooling self.model_sampling_current = None self.manual_cast_dtype = manual_cast_dtype def get_control(self, x_noisy, t, cond, batched_number): control_prev = None if self.previous_controlnet is not None: control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number) if self.timestep_range is not None: if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]: if control_prev is not None: return control_prev else: return None dtype = self.control_model.dtype if self.manual_cast_dtype is not None: dtype = self.manual_cast_dtype output_dtype = x_noisy.dtype if self.cond_hint is None or x_noisy.shape[2] * self.compression_ratio != self.cond_hint.shape[2] or x_noisy.shape[3] * self.compression_ratio != self.cond_hint.shape[3]: if self.cond_hint is not None: del self.cond_hint self.cond_hint = None self.cond_hint = comfy.utils.common_upscale(self.cond_hint_original, x_noisy.shape[3] * self.compression_ratio, x_noisy.shape[2] * self.compression_ratio, self.upscale_algorithm, "center").to(dtype).to(self.device) if x_noisy.shape[0] != self.cond_hint.shape[0]: self.cond_hint = broadcast_image_to(self.cond_hint, x_noisy.shape[0], batched_number) context = cond.get('crossattn_controlnet', cond['c_crossattn']) y = cond.get('y', None) if y is not None: y = y.to(dtype) timestep = self.model_sampling_current.timestep(t) x_noisy = self.model_sampling_current.calculate_input(t, x_noisy) control = self.control_model(x=x_noisy.to(dtype), hint=self.cond_hint, timesteps=timestep.float(), context=context.to(dtype), y=y) return self.control_merge(None, control, control_prev, output_dtype) def copy(self): c = ControlNet(None, global_average_pooling=self.global_average_pooling, load_device=self.load_device, manual_cast_dtype=self.manual_cast_dtype) c.control_model = self.control_model c.control_model_wrapped = self.control_model_wrapped self.copy_to(c) return c def get_models(self): out = super().get_models() out.append(self.control_model_wrapped) return out def pre_run(self, model, percent_to_timestep_function): super().pre_run(model, percent_to_timestep_function) self.model_sampling_current = model.model_sampling def cleanup(self): self.model_sampling_current = None super().cleanup() class ControlLoraOps: class Linear(torch.nn.Module, comfy.ops.CastWeightBiasOp): def __init__(self, in_features: int, out_features: int, bias: bool = True, device=None, dtype=None) -> None: factory_kwargs = {'device': device, 'dtype': dtype} super().__init__() self.in_features = in_features self.out_features = out_features self.weight = None self.up = None self.down = None self.bias = None def forward(self, input): weight, bias = comfy.ops.cast_bias_weight(self, input) if self.up is not None: return torch.nn.functional.linear(input, weight + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), bias) else: return torch.nn.functional.linear(input, weight, bias) class Conv2d(torch.nn.Module, 
comfy.ops.CastWeightBiasOp): def __init__( self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True, padding_mode='zeros', device=None, dtype=None ): super().__init__() self.in_channels = in_channels self.out_channels = out_channels self.kernel_size = kernel_size self.stride = stride self.padding = padding self.dilation = dilation self.transposed = False self.output_padding = 0 self.groups = groups self.padding_mode = padding_mode self.weight = None self.bias = None self.up = None self.down = None def forward(self, input): weight, bias = comfy.ops.cast_bias_weight(self, input) if self.up is not None: return torch.nn.functional.conv2d(input, weight + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), bias, self.stride, self.padding, self.dilation, self.groups) else: return torch.nn.functional.conv2d(input, weight, bias, self.stride, self.padding, self.dilation, self.groups) class ControlLora(ControlNet): def __init__(self, control_weights, global_average_pooling=False, device=None): ControlBase.__init__(self, device) self.control_weights = control_weights self.global_average_pooling = global_average_pooling def pre_run(self, model, percent_to_timestep_function): super().pre_run(model, percent_to_timestep_function) controlnet_config = model.model_config.unet_config.copy() controlnet_config.pop("out_channels") controlnet_config["hint_channels"] = self.control_weights["input_hint_block.0.weight"].shape[1] self.manual_cast_dtype = model.manual_cast_dtype dtype = model.get_dtype() if self.manual_cast_dtype is None: class control_lora_ops(ControlLoraOps, comfy.ops.disable_weight_init): pass else: class control_lora_ops(ControlLoraOps, comfy.ops.manual_cast): pass dtype = self.manual_cast_dtype controlnet_config["operations"] = control_lora_ops controlnet_config["dtype"] = dtype self.control_model = comfy.cldm.cldm.ControlNet(**controlnet_config) self.control_model.to(comfy.model_management.get_torch_device()) diffusion_model = model.diffusion_model sd = diffusion_model.state_dict() cm = self.control_model.state_dict() for k in sd: weight = sd[k] try: comfy.utils.set_attr_param(self.control_model, k, weight) except: pass for k in self.control_weights: if k not in {"lora_controlnet"}: comfy.utils.set_attr_param(self.control_model, k, self.control_weights[k].to(dtype).to(comfy.model_management.get_torch_device())) def copy(self): c = ControlLora(self.control_weights, global_average_pooling=self.global_average_pooling) self.copy_to(c) return c def cleanup(self): del self.control_model self.control_model = None super().cleanup() def get_models(self): out = ControlBase.get_models(self) return out def inference_memory_requirements(self, dtype): return comfy.utils.calculate_parameters(self.control_weights) * comfy.model_management.dtype_size(dtype) + ControlBase.inference_memory_requirements(self, dtype) def load_controlnet(ckpt_path, model=None): controlnet_data = comfy.utils.load_torch_file(ckpt_path, safe_load=True) if "lora_controlnet" in controlnet_data: return ControlLora(controlnet_data) controlnet_config = None supported_inference_dtypes = None if "controlnet_cond_embedding.conv_in.weight" in controlnet_data: controlnet_config = comfy.model_detection.unet_config_from_diffusers_unet(controlnet_data) diffusers_keys = comfy.utils.unet_to_diffusers(controlnet_config) diffusers_keys["controlnet_mid_block.weight"] = "middle_block_out.0.weight" diffusers_keys["controlnet_mid_block.bias"] = 
"middle_block_out.0.bias" count = 0 loop = True while loop: suffix = [".weight", ".bias"] for s in suffix: k_in = "controlnet_down_blocks.{}{}".format(count, s) k_out = "zero_convs.{}.0{}".format(count, s) if k_in not in controlnet_data: loop = False break diffusers_keys[k_in] = k_out count += 1 count = 0 loop = True while loop: suffix = [".weight", ".bias"] for s in suffix: if count == 0: k_in = "controlnet_cond_embedding.conv_in{}".format(s) else: k_in = "controlnet_cond_embedding.blocks.{}{}".format(count - 1, s) k_out = "input_hint_block.{}{}".format(count * 2, s) if k_in not in controlnet_data: k_in = "controlnet_cond_embedding.conv_out{}".format(s) loop = False diffusers_keys[k_in] = k_out count += 1 new_sd = {} for k in diffusers_keys: if k in controlnet_data: new_sd[diffusers_keys[k]] = controlnet_data.pop(k) leftover_keys = controlnet_data.keys() if len(leftover_keys) > 0: logging.warning("leftover keys: {}".format(leftover_keys)) controlnet_data = new_sd pth_key = 'control_model.zero_convs.0.0.weight' pth = False key = 'zero_convs.0.0.weight' if pth_key in controlnet_data: pth = True key = pth_key prefix = "control_model." elif key in controlnet_data: prefix = "" else: net = load_t2i_adapter(controlnet_data) if net is None: logging.error("error checkpoint does not contain controlnet or t2i adapter data {}".format(ckpt_path)) return net if controlnet_config is None: model_config = comfy.model_detection.model_config_from_unet(controlnet_data, prefix, True) supported_inference_dtypes = model_config.supported_inference_dtypes controlnet_config = model_config.unet_config load_device = comfy.model_management.get_torch_device() if supported_inference_dtypes is None: unet_dtype = comfy.model_management.unet_dtype() else: unet_dtype = comfy.model_management.unet_dtype(supported_dtypes=supported_inference_dtypes) manual_cast_dtype = comfy.model_management.unet_manual_cast(unet_dtype, load_device) if manual_cast_dtype is not None: controlnet_config["operations"] = comfy.ops.manual_cast controlnet_config["dtype"] = unet_dtype controlnet_config.pop("out_channels") controlnet_config["hint_channels"] = controlnet_data["{}input_hint_block.0.weight".format(prefix)].shape[1] control_model = comfy.cldm.cldm.ControlNet(**controlnet_config) if pth: if 'difference' in controlnet_data: if model is not None: comfy.model_management.load_models_gpu([model]) model_sd = model.model_state_dict() for x in controlnet_data: c_m = "control_model." if x.startswith(c_m): sd_key = "diffusion_model.{}".format(x[len(c_m):]) if sd_key in model_sd: cd = controlnet_data[x] cd += model_sd[sd_key].type(cd.dtype).to(cd.device) else: logging.warning("WARNING: Loaded a diff controlnet without a model. 
It will very likely not work.") class WeightsLoader(torch.nn.Module): pass w = WeightsLoader() w.control_model = control_model missing, unexpected = w.load_state_dict(controlnet_data, strict=False) else: missing, unexpected = control_model.load_state_dict(controlnet_data, strict=False) if len(missing) > 0: logging.warning("missing controlnet keys: {}".format(missing)) if len(unexpected) > 0: logging.debug("unexpected controlnet keys: {}".format(unexpected)) global_average_pooling = False filename = os.path.splitext(ckpt_path)[0] if filename.endswith("_shuffle") or filename.endswith("_shuffle_fp16"): global_average_pooling = True control = ControlNet(control_model, global_average_pooling=global_average_pooling, load_device=load_device, manual_cast_dtype=manual_cast_dtype) return control class T2IAdapter(ControlBase): def __init__(self, t2i_model, channels_in, compression_ratio, upscale_algorithm, device=None): super().__init__(device) self.t2i_model = t2i_model self.channels_in = channels_in self.control_input = None self.compression_ratio = compression_ratio self.upscale_algorithm = upscale_algorithm def scale_image_to(self, width, height): unshuffle_amount = self.t2i_model.unshuffle_amount width = math.ceil(width / unshuffle_amount) * unshuffle_amount height = math.ceil(height / unshuffle_amount) * unshuffle_amount return width, height def get_control(self, x_noisy, t, cond, batched_number): control_prev = None if self.previous_controlnet is not None: control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number) if self.timestep_range is not None: if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]: if control_prev is not None: return control_prev else: return None if self.cond_hint is None or x_noisy.shape[2] * self.compression_ratio != self.cond_hint.shape[2] or x_noisy.shape[3] * self.compression_ratio != self.cond_hint.shape[3]: if self.cond_hint is not None: del self.cond_hint self.control_input = None self.cond_hint = None width, height = self.scale_image_to(x_noisy.shape[3] * self.compression_ratio, x_noisy.shape[2] * self.compression_ratio) self.cond_hint = comfy.utils.common_upscale(self.cond_hint_original, width, height, self.upscale_algorithm, "center").float().to(self.device) if self.channels_in == 1 and self.cond_hint.shape[1] > 1: self.cond_hint = torch.mean(self.cond_hint, 1, keepdim=True) if x_noisy.shape[0] != self.cond_hint.shape[0]: self.cond_hint = broadcast_image_to(self.cond_hint, x_noisy.shape[0], batched_number) if self.control_input is None: self.t2i_model.to(x_noisy.dtype) self.t2i_model.to(self.device) self.control_input = self.t2i_model(self.cond_hint.to(x_noisy.dtype)) self.t2i_model.cpu() control_input = list(map(lambda a: None if a is None else a.clone(), self.control_input)) mid = None if self.t2i_model.xl == True: mid = control_input[-1:] control_input = control_input[:-1] return self.control_merge(control_input, mid, control_prev, x_noisy.dtype) def copy(self): c = T2IAdapter(self.t2i_model, self.channels_in, self.compression_ratio, self.upscale_algorithm) self.copy_to(c) return c def load_t2i_adapter(t2i_data): compression_ratio = 8 upscale_algorithm = 'nearest-exact' if 'adapter' in t2i_data: t2i_data = t2i_data['adapter'] if 'adapter.body.0.resnets.0.block1.weight' in t2i_data: prefix_replace = {} for i in range(4): for j in range(2): prefix_replace["adapter.body.{}.resnets.{}.".format(i, j)] = "body.{}.".format(i * 2 + j) prefix_replace["adapter.body.{}.".format(i, j)] = "body.{}.".format(i * 2) 
prefix_replace["adapter."] = "" t2i_data = comfy.utils.state_dict_prefix_replace(t2i_data, prefix_replace) keys = t2i_data.keys() if "body.0.in_conv.weight" in keys: cin = t2i_data['body.0.in_conv.weight'].shape[1] model_ad = comfy.t2i_adapter.adapter.Adapter_light(cin=cin, channels=[320, 640, 1280, 1280], nums_rb=4) elif 'conv_in.weight' in keys: cin = t2i_data['conv_in.weight'].shape[1] channel = t2i_data['conv_in.weight'].shape[0] ksize = t2i_data['body.0.block2.weight'].shape[2] use_conv = False down_opts = list(filter(lambda a: a.endswith("down_opt.op.weight"), keys)) if len(down_opts) > 0: use_conv = True xl = False if cin == 256 or cin == 768: xl = True model_ad = comfy.t2i_adapter.adapter.Adapter(cin=cin, channels=[channel, channel*2, channel*4, channel*4][:4], nums_rb=2, ksize=ksize, sk=True, use_conv=use_conv, xl=xl) elif "backbone.0.0.weight" in keys: model_ad = comfy.ldm.cascade.controlnet.ControlNet(c_in=t2i_data['backbone.0.0.weight'].shape[1], proj_blocks=[0, 4, 8, 12, 51, 55, 59, 63]) compression_ratio = 32 upscale_algorithm = 'bilinear' elif "backbone.10.blocks.0.weight" in keys: model_ad = comfy.ldm.cascade.controlnet.ControlNet(c_in=t2i_data['backbone.0.weight'].shape[1], bottleneck_mode="large", proj_blocks=[0, 4, 8, 12, 51, 55, 59, 63]) compression_ratio = 1 upscale_algorithm = 'nearest-exact' else: return None missing, unexpected = model_ad.load_state_dict(t2i_data) if len(missing) > 0: logging.warning("t2i missing {}".format(missing)) if len(unexpected) > 0: logging.debug("t2i unexpected {}".format(unexpected)) return T2IAdapter(model_ad, model_ad.input_channels, compression_ratio, upscale_algorithm)
import re import torch import logging unet_conversion_map = [ ("time_embed.0.weight", "time_embedding.linear_1.weight"), ("time_embed.0.bias", "time_embedding.linear_1.bias"), ("time_embed.2.weight", "time_embedding.linear_2.weight"), ("time_embed.2.bias", "time_embedding.linear_2.bias"), ("input_blocks.0.0.weight", "conv_in.weight"), ("input_blocks.0.0.bias", "conv_in.bias"), ("out.0.weight", "conv_norm_out.weight"), ("out.0.bias", "conv_norm_out.bias"), ("out.2.weight", "conv_out.weight"), ("out.2.bias", "conv_out.bias"), ] unet_conversion_map_resnet = [ ("in_layers.0", "norm1"), ("in_layers.2", "conv1"), ("out_layers.0", "norm2"), ("out_layers.3", "conv2"), ("emb_layers.1", "time_emb_proj"), ("skip_connection", "conv_shortcut"), ] unet_conversion_map_layer = [] for i in range(4): for j in range(2): hf_down_res_prefix = f"down_blocks.{i}.resnets.{j}." sd_down_res_prefix = f"input_blocks.{3 * i + j + 1}.0." unet_conversion_map_layer.append((sd_down_res_prefix, hf_down_res_prefix)) if i < 3: hf_down_atn_prefix = f"down_blocks.{i}.attentions.{j}." sd_down_atn_prefix = f"input_blocks.{3 * i + j + 1}.1." unet_conversion_map_layer.append((sd_down_atn_prefix, hf_down_atn_prefix)) for j in range(3): hf_up_res_prefix = f"up_blocks.{i}.resnets.{j}." sd_up_res_prefix = f"output_blocks.{3 * i + j}.0." unet_conversion_map_layer.append((sd_up_res_prefix, hf_up_res_prefix)) if i > 0: hf_up_atn_prefix = f"up_blocks.{i}.attentions.{j}." sd_up_atn_prefix = f"output_blocks.{3 * i + j}.1." unet_conversion_map_layer.append((sd_up_atn_prefix, hf_up_atn_prefix)) if i < 3: hf_downsample_prefix = f"down_blocks.{i}.downsamplers.0.conv." sd_downsample_prefix = f"input_blocks.{3 * (i + 1)}.0.op." unet_conversion_map_layer.append((sd_downsample_prefix, hf_downsample_prefix)) hf_upsample_prefix = f"up_blocks.{i}.upsamplers.0." sd_upsample_prefix = f"output_blocks.{3 * i + 2}.{1 if i == 0 else 2}." unet_conversion_map_layer.append((sd_upsample_prefix, hf_upsample_prefix)) hf_mid_atn_prefix = "mid_block.attentions.0." sd_mid_atn_prefix = "middle_block.1." unet_conversion_map_layer.append((sd_mid_atn_prefix, hf_mid_atn_prefix)) for j in range(2): hf_mid_res_prefix = f"mid_block.resnets.{j}." sd_mid_res_prefix = f"middle_block.{2 * j}." unet_conversion_map_layer.append((sd_mid_res_prefix, hf_mid_res_prefix)) def convert_unet_state_dict(unet_state_dict): mapping = {k: k for k in unet_state_dict.keys()} for sd_name, hf_name in unet_conversion_map: mapping[hf_name] = sd_name for k, v in mapping.items(): if "resnets" in k: for sd_part, hf_part in unet_conversion_map_resnet: v = v.replace(hf_part, sd_part) mapping[k] = v for k, v in mapping.items(): for sd_part, hf_part in unet_conversion_map_layer: v = v.replace(hf_part, sd_part) mapping[k] = v new_state_dict = {v: unet_state_dict[k] for k, v in mapping.items()} return new_state_dict vae_conversion_map = [ ("nin_shortcut", "conv_shortcut"), ("norm_out", "conv_norm_out"), ("mid.attn_1.", "mid_block.attentions.0."), ] for i in range(4): for j in range(2): hf_down_prefix = f"encoder.down_blocks.{i}.resnets.{j}." sd_down_prefix = f"encoder.down.{i}.block.{j}." vae_conversion_map.append((sd_down_prefix, hf_down_prefix)) if i < 3: hf_downsample_prefix = f"down_blocks.{i}.downsamplers.0." sd_downsample_prefix = f"down.{i}.downsample." vae_conversion_map.append((sd_downsample_prefix, hf_downsample_prefix)) hf_upsample_prefix = f"up_blocks.{i}.upsamplers.0." sd_upsample_prefix = f"up.{3 - i}.upsample." 
vae_conversion_map.append((sd_upsample_prefix, hf_upsample_prefix)) for j in range(3): hf_up_prefix = f"decoder.up_blocks.{i}.resnets.{j}." sd_up_prefix = f"decoder.up.{3 - i}.block.{j}." vae_conversion_map.append((sd_up_prefix, hf_up_prefix)) for i in range(2): hf_mid_res_prefix = f"mid_block.resnets.{i}." sd_mid_res_prefix = f"mid.block_{i + 1}." vae_conversion_map.append((sd_mid_res_prefix, hf_mid_res_prefix)) vae_conversion_map_attn = [ ("norm.", "group_norm."), ("q.", "query."), ("k.", "key."), ("v.", "value."), ("q.", "to_q."), ("k.", "to_k."), ("v.", "to_v."), ("proj_out.", "to_out.0."), ("proj_out.", "proj_attn."), ] def reshape_weight_for_sd(w): return w.reshape(*w.shape, 1, 1) def convert_vae_state_dict(vae_state_dict): mapping = {k: k for k in vae_state_dict.keys()} for k, v in mapping.items(): for sd_part, hf_part in vae_conversion_map: v = v.replace(hf_part, sd_part) mapping[k] = v for k, v in mapping.items(): if "attentions" in k: for sd_part, hf_part in vae_conversion_map_attn: v = v.replace(hf_part, sd_part) mapping[k] = v new_state_dict = {v: vae_state_dict[k] for k, v in mapping.items()} weights_to_convert = ["q", "k", "v", "proj_out"] for k, v in new_state_dict.items(): for weight_name in weights_to_convert: if f"mid.attn_1.{weight_name}.weight" in k: logging.debug(f"Reshaping {k} for SD format") new_state_dict[k] = reshape_weight_for_sd(v) return new_state_dict textenc_conversion_lst = [ ("resblocks.", "text_model.encoder.layers."), ("ln_1", "layer_norm1"), ("ln_2", "layer_norm2"), (".c_fc.", ".fc1."), (".c_proj.", ".fc2."), (".attn", ".self_attn"), ("ln_final.", "transformer.text_model.final_layer_norm."), ("token_embedding.weight", "transformer.text_model.embeddings.token_embedding.weight"), ("positional_embedding", "transformer.text_model.embeddings.position_embedding.weight"), ] protected = {re.escape(x[1]): x[0] for x in textenc_conversion_lst} textenc_pattern = re.compile("|".join(protected.keys())) code2idx = {"q": 0, "k": 1, "v": 2} def cat_tensors(tensors): x = 0 for t in tensors: x += t.shape[0] shape = [x] + list(tensors[0].shape)[1:] out = torch.empty(shape, device=tensors[0].device, dtype=tensors[0].dtype) x = 0 for t in tensors: out[x:x + t.shape[0]] = t x += t.shape[0] return out def convert_text_enc_state_dict_v20(text_enc_dict, prefix=""): new_state_dict = {} capture_qkv_weight = {} capture_qkv_bias = {} for k, v in text_enc_dict.items(): if not k.startswith(prefix): continue if ( k.endswith(".self_attn.q_proj.weight") or k.endswith(".self_attn.k_proj.weight") or k.endswith(".self_attn.v_proj.weight") ): k_pre = k[: -len(".q_proj.weight")] k_code = k[-len("q_proj.weight")] if k_pre not in capture_qkv_weight: capture_qkv_weight[k_pre] = [None, None, None] capture_qkv_weight[k_pre][code2idx[k_code]] = v continue if ( k.endswith(".self_attn.q_proj.bias") or k.endswith(".self_attn.k_proj.bias") or k.endswith(".self_attn.v_proj.bias") ): k_pre = k[: -len(".q_proj.bias")] k_code = k[-len("q_proj.bias")] if k_pre not in capture_qkv_bias: capture_qkv_bias[k_pre] = [None, None, None] capture_qkv_bias[k_pre][code2idx[k_code]] = v continue text_proj = "transformer.text_projection.weight" if k.endswith(text_proj): new_state_dict[k.replace(text_proj, "text_projection")] = v.transpose(0, 1).contiguous() else: relabelled_key = textenc_pattern.sub(lambda m: protected[re.escape(m.group(0))], k) new_state_dict[relabelled_key] = v for k_pre, tensors in capture_qkv_weight.items(): if None in tensors: raise Exception("CORRUPTED MODEL: one of the q-k-v values for the text 
encoder was missing") relabelled_key = textenc_pattern.sub(lambda m: protected[re.escape(m.group(0))], k_pre) new_state_dict[relabelled_key + ".in_proj_weight"] = cat_tensors(tensors) for k_pre, tensors in capture_qkv_bias.items(): if None in tensors: raise Exception("CORRUPTED MODEL: one of the q-k-v values for the text encoder was missing") relabelled_key = textenc_pattern.sub(lambda m: protected[re.escape(m.group(0))], k_pre) new_state_dict[relabelled_key + ".in_proj_bias"] = cat_tensors(tensors) return new_state_dict def convert_text_enc_state_dict(text_enc_dict): return text_enc_dict
import os

import comfy.sd
import comfy.utils  # load_torch_file is used for the VAE below


def first_file(path, filenames):
    for f in filenames:
        p = os.path.join(path, f)
        if os.path.exists(p):
            return p
    return None


def load_diffusers(model_path, output_vae=True, output_clip=True, embedding_directory=None):
    diffusion_model_names = ["diffusion_pytorch_model.fp16.safetensors", "diffusion_pytorch_model.safetensors", "diffusion_pytorch_model.fp16.bin", "diffusion_pytorch_model.bin"]
    unet_path = first_file(os.path.join(model_path, "unet"), diffusion_model_names)
    vae_path = first_file(os.path.join(model_path, "vae"), diffusion_model_names)

    text_encoder_model_names = ["model.fp16.safetensors", "model.safetensors", "pytorch_model.fp16.bin", "pytorch_model.bin"]
    text_encoder1_path = first_file(os.path.join(model_path, "text_encoder"), text_encoder_model_names)
    text_encoder2_path = first_file(os.path.join(model_path, "text_encoder_2"), text_encoder_model_names)

    text_encoder_paths = [text_encoder1_path]
    if text_encoder2_path is not None:
        text_encoder_paths.append(text_encoder2_path)

    unet = comfy.sd.load_unet(unet_path)

    clip = None
    if output_clip:
        clip = comfy.sd.load_clip(text_encoder_paths, embedding_directory=embedding_directory)

    vae = None
    if output_vae:
        sd = comfy.utils.load_torch_file(vae_path)
        vae = comfy.sd.VAE(sd=sd)

    return (unet, clip, vae)
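# --- usage sketch (not part of the original module) --------------------------
# load_diffusers() expects a diffusers-style folder layout (unet/, vae/,
# text_encoder/ and optionally text_encoder_2/).  The path below is a
# placeholder; the return value is always the (unet, clip, vae) triple, with
# clip or vae set to None when output_clip/output_vae is False.
def load_diffusers_example():
    unet, clip, vae = load_diffusers("models/diffusers/example-checkpoint",
                                     output_vae=True, output_clip=True)
    return unet, clip, vae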
import torch from torch import nn from .ldm.modules.attention import CrossAttention from inspect import isfunction import comfy.ops ops = comfy.ops.manual_cast def exists(val): return val is not None def uniq(arr): return{el: True for el in arr}.keys() def default(val, d): if exists(val): return val return d() if isfunction(d) else d class GEGLU(nn.Module): def __init__(self, dim_in, dim_out): super().__init__() self.proj = ops.Linear(dim_in, dim_out * 2) def forward(self, x): x, gate = self.proj(x).chunk(2, dim=-1) return x * torch.nn.functional.gelu(gate) class FeedForward(nn.Module): def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.): super().__init__() inner_dim = int(dim * mult) dim_out = default(dim_out, dim) project_in = nn.Sequential( ops.Linear(dim, inner_dim), nn.GELU() ) if not glu else GEGLU(dim, inner_dim) self.net = nn.Sequential( project_in, nn.Dropout(dropout), ops.Linear(inner_dim, dim_out) ) def forward(self, x): return self.net(x) class GatedCrossAttentionDense(nn.Module): def __init__(self, query_dim, context_dim, n_heads, d_head): super().__init__() self.attn = CrossAttention( query_dim=query_dim, context_dim=context_dim, heads=n_heads, dim_head=d_head, operations=ops) self.ff = FeedForward(query_dim, glu=True) self.norm1 = ops.LayerNorm(query_dim) self.norm2 = ops.LayerNorm(query_dim) self.register_parameter('alpha_attn', nn.Parameter(torch.tensor(0.))) self.register_parameter('alpha_dense', nn.Parameter(torch.tensor(0.))) self.scale = 1 def forward(self, x, objs): x = x + self.scale * \ torch.tanh(self.alpha_attn) * self.attn(self.norm1(x), objs, objs) x = x + self.scale * \ torch.tanh(self.alpha_dense) * self.ff(self.norm2(x)) return x class GatedSelfAttentionDense(nn.Module): def __init__(self, query_dim, context_dim, n_heads, d_head): super().__init__() self.linear = ops.Linear(context_dim, query_dim) self.attn = CrossAttention( query_dim=query_dim, context_dim=query_dim, heads=n_heads, dim_head=d_head, operations=ops) self.ff = FeedForward(query_dim, glu=True) self.norm1 = ops.LayerNorm(query_dim) self.norm2 = ops.LayerNorm(query_dim) self.register_parameter('alpha_attn', nn.Parameter(torch.tensor(0.))) self.register_parameter('alpha_dense', nn.Parameter(torch.tensor(0.))) self.scale = 1 def forward(self, x, objs): N_visual = x.shape[1] objs = self.linear(objs) x = x + self.scale * torch.tanh(self.alpha_attn) * self.attn( self.norm1(torch.cat([x, objs], dim=1)))[:, 0:N_visual, :] x = x + self.scale * \ torch.tanh(self.alpha_dense) * self.ff(self.norm2(x)) return x class GatedSelfAttentionDense2(nn.Module): def __init__(self, query_dim, context_dim, n_heads, d_head): super().__init__() self.linear = ops.Linear(context_dim, query_dim) self.attn = CrossAttention( query_dim=query_dim, context_dim=query_dim, dim_head=d_head, operations=ops) self.ff = FeedForward(query_dim, glu=True) self.norm1 = ops.LayerNorm(query_dim) self.norm2 = ops.LayerNorm(query_dim) self.register_parameter('alpha_attn', nn.Parameter(torch.tensor(0.))) self.register_parameter('alpha_dense', nn.Parameter(torch.tensor(0.))) self.scale = 1 def forward(self, x, objs): B, N_visual, _ = x.shape B, N_ground, _ = objs.shape objs = self.linear(objs) size_v = math.sqrt(N_visual) size_g = math.sqrt(N_ground) assert int(size_v) == size_v, "Visual tokens must be square rootable" assert int(size_g) == size_g, "Grounding tokens must be square rootable" size_v = int(size_v) size_g = int(size_g) out = self.attn(self.norm1(torch.cat([x, objs], dim=1)))[ :, N_visual:, :] out = out.permute(0, 2, 
1).reshape(B, -1, size_g, size_g) out = torch.nn.functional.interpolate( out, (size_v, size_v), mode='bicubic') residual = out.reshape(B, -1, N_visual).permute(0, 2, 1) x = x + self.scale * torch.tanh(self.alpha_attn) * residual x = x + self.scale * \ torch.tanh(self.alpha_dense) * self.ff(self.norm2(x)) return x class FourierEmbedder(): def __init__(self, num_freqs=64, temperature=100): self.num_freqs = num_freqs self.temperature = temperature self.freq_bands = temperature ** (torch.arange(num_freqs) / num_freqs) @torch.no_grad() def __call__(self, x, cat_dim=-1): "x: arbitrary shape of tensor. dim: cat dim" out = [] for freq in self.freq_bands: out.append(torch.sin(freq * x)) out.append(torch.cos(freq * x)) return torch.cat(out, cat_dim) class PositionNet(nn.Module): def __init__(self, in_dim, out_dim, fourier_freqs=8): super().__init__() self.in_dim = in_dim self.out_dim = out_dim self.fourier_embedder = FourierEmbedder(num_freqs=fourier_freqs) self.position_dim = fourier_freqs * 2 * 4 self.linears = nn.Sequential( ops.Linear(self.in_dim + self.position_dim, 512), nn.SiLU(), ops.Linear(512, 512), nn.SiLU(), ops.Linear(512, out_dim), ) self.null_positive_feature = torch.nn.Parameter( torch.zeros([self.in_dim])) self.null_position_feature = torch.nn.Parameter( torch.zeros([self.position_dim])) def forward(self, boxes, masks, positive_embeddings): B, N, _ = boxes.shape masks = masks.unsqueeze(-1) positive_embeddings = positive_embeddings xyxy_embedding = self.fourier_embedder(boxes) positive_null = self.null_positive_feature.to(device=boxes.device, dtype=boxes.dtype).view(1, 1, -1) xyxy_null = self.null_position_feature.to(device=boxes.device, dtype=boxes.dtype).view(1, 1, -1) positive_embeddings = positive_embeddings * \ masks + (1 - masks) * positive_null xyxy_embedding = xyxy_embedding * masks + (1 - masks) * xyxy_null objs = self.linears( torch.cat([positive_embeddings, xyxy_embedding], dim=-1)) assert objs.shape == torch.Size([B, N, self.out_dim]) return objs class Gligen(nn.Module): def __init__(self, modules, position_net, key_dim): super().__init__() self.module_list = nn.ModuleList(modules) self.position_net = position_net self.key_dim = key_dim self.max_objs = 30 self.current_device = torch.device("cpu") def _set_position(self, boxes, masks, positive_embeddings): objs = self.position_net(boxes, masks, positive_embeddings) def func(x, extra_options): key = extra_options["transformer_index"] module = self.module_list[key] return module(x, objs.to(device=x.device, dtype=x.dtype)) return func def set_position(self, latent_image_shape, position_params, device): batch, c, h, w = latent_image_shape masks = torch.zeros([self.max_objs], device="cpu") boxes = [] positive_embeddings = [] for p in position_params: x1 = (p[4]) / w y1 = (p[3]) / h x2 = (p[4] + p[2]) / w y2 = (p[3] + p[1]) / h masks[len(boxes)] = 1.0 boxes += [torch.tensor((x1, y1, x2, y2)).unsqueeze(0)] positive_embeddings += [p[0]] append_boxes = [] append_conds = [] if len(boxes) < self.max_objs: append_boxes = [torch.zeros( [self.max_objs - len(boxes), 4], device="cpu")] append_conds = [torch.zeros( [self.max_objs - len(boxes), self.key_dim], device="cpu")] box_out = torch.cat( boxes + append_boxes).unsqueeze(0).repeat(batch, 1, 1) masks = masks.unsqueeze(0).repeat(batch, 1) conds = torch.cat(positive_embeddings + append_conds).unsqueeze(0).repeat(batch, 1, 1) return self._set_position( box_out.to(device), masks.to(device), conds.to(device)) def set_empty(self, latent_image_shape, device): batch, c, h, w = 
latent_image_shape masks = torch.zeros([self.max_objs], device="cpu").repeat(batch, 1) box_out = torch.zeros([self.max_objs, 4], device="cpu").repeat(batch, 1, 1) conds = torch.zeros([self.max_objs, self.key_dim], device="cpu").repeat(batch, 1, 1) return self._set_position( box_out.to(device), masks.to(device), conds.to(device)) def load_gligen(sd): sd_k = sd.keys() output_list = [] key_dim = 768 for a in ["input_blocks", "middle_block", "output_blocks"]: for b in range(20): k_temp = filter(lambda k: "{}.{}.".format(a, b) in k and ".fuser." in k, sd_k) k_temp = map(lambda k: (k, k.split(".fuser.")[-1]), k_temp) n_sd = {} for k in k_temp: n_sd[k[1]] = sd[k[0]] if len(n_sd) > 0: query_dim = n_sd["linear.weight"].shape[0] key_dim = n_sd["linear.weight"].shape[1] if key_dim == 768: n_heads = 8 d_head = query_dim else: d_head = 64 n_heads = query_dim gated = GatedSelfAttentionDense( query_dim, key_dim, n_heads, d_head) gated.load_state_dict(n_sd, strict=False) output_list.append(gated) if "position_net.null_positive_feature" in sd_k: in_dim = sd["position_net.null_positive_feature"].shape[0] out_dim = sd["position_net.linears.4.weight"].shape[0] class WeightsLoader(torch.nn.Module): pass w = WeightsLoader() w.position_net = PositionNet(in_dim, out_dim) w.load_state_dict(sd, strict=False) gligen = Gligen(output_list, w.position_net, key_dim) return gligen
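# --- usage sketch (not part of the original module) --------------------------
# How a GLIGEN checkpoint is typically loaded and turned into the per-layer
# patch function that the UNet's transformer blocks call.  The file path is a
# placeholder; the (embedding, height, width, y, x) layout of each entry in
# position_params mirrors what set_position() above indexes, with sizes given
# in latent pixels.
import comfy.utils

def gligen_patch_sketch(pooled_text_embed, latent_shape, device):
    # pooled_text_embed: (1, key_dim) text embedding for one grounded box
    sd = comfy.utils.load_torch_file("models/gligen/example.safetensors", safe_load=True)
    gligen = load_gligen(sd)
    position_params = [(pooled_text_embed, 32, 32, 0, 0)]   # one 32x32 box at the top-left corner
    return gligen.set_position(latent_shape, position_params, device)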
import torch class LatentFormat: scale_factor = 1.0 latent_channels = 4 latent_rgb_factors = None taesd_decoder_name = None def process_in(self, latent): return latent * self.scale_factor def process_out(self, latent): return latent / self.scale_factor class SD15(LatentFormat): def __init__(self, scale_factor=0.18215): self.scale_factor = scale_factor self.latent_rgb_factors = [ [ 0.3512, 0.2297, 0.3227], [ 0.3250, 0.4974, 0.2350], [-0.2829, 0.1762, 0.2721], [-0.2120, -0.2616, -0.7177] ] self.taesd_decoder_name = "taesd_decoder" class SDXL(LatentFormat): scale_factor = 0.13025 def __init__(self): self.latent_rgb_factors = [ [ 0.3920, 0.4054, 0.4549], [-0.2634, -0.0196, 0.0653], [ 0.0568, 0.1687, -0.0755], [-0.3112, -0.2359, -0.2076] ] self.taesd_decoder_name = "taesdxl_decoder" class SDXL_Playground_2_5(LatentFormat): def __init__(self): self.scale_factor = 0.5 self.latents_mean = torch.tensor([-1.6574, 1.886, -1.383, 2.5155]).view(1, 4, 1, 1) self.latents_std = torch.tensor([8.4927, 5.9022, 6.5498, 5.2299]).view(1, 4, 1, 1) self.latent_rgb_factors = [ [ 0.3920, 0.4054, 0.4549], [-0.2634, -0.0196, 0.0653], [ 0.0568, 0.1687, -0.0755], [-0.3112, -0.2359, -0.2076] ] self.taesd_decoder_name = "taesdxl_decoder" def process_in(self, latent): latents_mean = self.latents_mean.to(latent.device, latent.dtype) latents_std = self.latents_std.to(latent.device, latent.dtype) return (latent - latents_mean) * self.scale_factor / latents_std def process_out(self, latent): latents_mean = self.latents_mean.to(latent.device, latent.dtype) latents_std = self.latents_std.to(latent.device, latent.dtype) return latent * latents_std / self.scale_factor + latents_mean class SD_X4(LatentFormat): def __init__(self): self.scale_factor = 0.08333 self.latent_rgb_factors = [ [-0.2340, -0.3863, -0.3257], [ 0.0994, 0.0885, -0.0908], [-0.2833, -0.2349, -0.3741], [ 0.2523, -0.0055, -0.1651] ] class SC_Prior(LatentFormat): latent_channels = 16 def __init__(self): self.scale_factor = 1.0 self.latent_rgb_factors = [ [-0.0326, -0.0204, -0.0127], [-0.1592, -0.0427, 0.0216], [ 0.0873, 0.0638, -0.0020], [-0.0602, 0.0442, 0.1304], [ 0.0800, -0.0313, -0.1796], [-0.0810, -0.0638, -0.1581], [ 0.1791, 0.1180, 0.0967], [ 0.0740, 0.1416, 0.0432], [-0.1745, -0.1888, -0.1373], [ 0.2412, 0.1577, 0.0928], [ 0.1908, 0.0998, 0.0682], [ 0.0209, 0.0365, -0.0092], [ 0.0448, -0.0650, -0.1728], [-0.1658, -0.1045, -0.1308], [ 0.0542, 0.1545, 0.1325], [-0.0352, -0.1672, -0.2541] ] class SC_B(LatentFormat): def __init__(self): self.scale_factor = 1.0 / 0.43 self.latent_rgb_factors = [ [ 0.1121, 0.2006, 0.1023], [-0.2093, -0.0222, -0.0195], [-0.3087, -0.1535, 0.0366], [ 0.0290, -0.1574, -0.4078] ] class SD3(LatentFormat): latent_channels = 16 def __init__(self): self.scale_factor = 1.5305 self.shift_factor = 0.0609 self.latent_rgb_factors = [ [-0.0645, 0.0177, 0.1052], [ 0.0028, 0.0312, 0.0650], [ 0.1848, 0.0762, 0.0360], [ 0.0944, 0.0360, 0.0889], [ 0.0897, 0.0506, -0.0364], [-0.0020, 0.1203, 0.0284], [ 0.0855, 0.0118, 0.0283], [-0.0539, 0.0658, 0.1047], [-0.0057, 0.0116, 0.0700], [-0.0412, 0.0281, -0.0039], [ 0.1106, 0.1171, 0.1220], [-0.0248, 0.0682, -0.0481], [ 0.0815, 0.0846, 0.1207], [-0.0120, -0.0055, -0.0867], [-0.0749, -0.0634, -0.0456], [-0.1418, -0.1457, -0.1259] ] self.taesd_decoder_name = "taesd3_decoder" def process_in(self, latent): return (latent - self.shift_factor) * self.scale_factor def process_out(self, latent): return (latent / self.scale_factor) + self.shift_factor class StableAudio1(LatentFormat): latent_channels = 64
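# --- usage sketch (not part of the original module) --------------------------
# The LatentFormat subclasses above only rescale (and, for some models, shift)
# latents between VAE space and the normalized space the diffusion model was
# trained in.  A quick round-trip check using SD15 as an example (torch is
# already imported at the top of this module):
def _latent_format_round_trip_check():
    fmt = SD15()
    latent = torch.randn(1, fmt.latent_channels, 64, 64)
    normalized = fmt.process_in(latent)        # latent * 0.18215
    restored = fmt.process_out(normalized)     # normalized / 0.18215
    assert torch.allclose(latent, restored, atol=1e-6)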
import comfy.utils import logging LORA_CLIP_MAP = { "mlp.fc1": "mlp_fc1", "mlp.fc2": "mlp_fc2", "self_attn.k_proj": "self_attn_k_proj", "self_attn.q_proj": "self_attn_q_proj", "self_attn.v_proj": "self_attn_v_proj", "self_attn.out_proj": "self_attn_out_proj", } def load_lora(lora, to_load): patch_dict = {} loaded_keys = set() for x in to_load: alpha_name = "{}.alpha".format(x) alpha = None if alpha_name in lora.keys(): alpha = lora[alpha_name].item() loaded_keys.add(alpha_name) dora_scale_name = "{}.dora_scale".format(x) dora_scale = None if dora_scale_name in lora.keys(): dora_scale = lora[dora_scale_name] loaded_keys.add(dora_scale_name) regular_lora = "{}.lora_up.weight".format(x) diffusers_lora = "{}_lora.up.weight".format(x) diffusers2_lora = "{}.lora_B.weight".format(x) diffusers3_lora = "{}.lora.up.weight".format(x) transformers_lora = "{}.lora_linear_layer.up.weight".format(x) A_name = None if regular_lora in lora.keys(): A_name = regular_lora B_name = "{}.lora_down.weight".format(x) mid_name = "{}.lora_mid.weight".format(x) elif diffusers_lora in lora.keys(): A_name = diffusers_lora B_name = "{}_lora.down.weight".format(x) mid_name = None elif diffusers2_lora in lora.keys(): A_name = diffusers2_lora B_name = "{}.lora_A.weight".format(x) mid_name = None elif diffusers3_lora in lora.keys(): A_name = diffusers3_lora B_name = "{}.lora.down.weight".format(x) mid_name = None elif transformers_lora in lora.keys(): A_name = transformers_lora B_name ="{}.lora_linear_layer.down.weight".format(x) mid_name = None if A_name is not None: mid = None if mid_name is not None and mid_name in lora.keys(): mid = lora[mid_name] loaded_keys.add(mid_name) patch_dict[to_load[x]] = ("lora", (lora[A_name], lora[B_name], alpha, mid, dora_scale)) loaded_keys.add(A_name) loaded_keys.add(B_name) hada_w1_a_name = "{}.hada_w1_a".format(x) hada_w1_b_name = "{}.hada_w1_b".format(x) hada_w2_a_name = "{}.hada_w2_a".format(x) hada_w2_b_name = "{}.hada_w2_b".format(x) hada_t1_name = "{}.hada_t1".format(x) hada_t2_name = "{}.hada_t2".format(x) if hada_w1_a_name in lora.keys(): hada_t1 = None hada_t2 = None if hada_t1_name in lora.keys(): hada_t1 = lora[hada_t1_name] hada_t2 = lora[hada_t2_name] loaded_keys.add(hada_t1_name) loaded_keys.add(hada_t2_name) patch_dict[to_load[x]] = ("loha", (lora[hada_w1_a_name], lora[hada_w1_b_name], alpha, lora[hada_w2_a_name], lora[hada_w2_b_name], hada_t1, hada_t2, dora_scale)) loaded_keys.add(hada_w1_a_name) loaded_keys.add(hada_w1_b_name) loaded_keys.add(hada_w2_a_name) loaded_keys.add(hada_w2_b_name) lokr_w1_name = "{}.lokr_w1".format(x) lokr_w2_name = "{}.lokr_w2".format(x) lokr_w1_a_name = "{}.lokr_w1_a".format(x) lokr_w1_b_name = "{}.lokr_w1_b".format(x) lokr_t2_name = "{}.lokr_t2".format(x) lokr_w2_a_name = "{}.lokr_w2_a".format(x) lokr_w2_b_name = "{}.lokr_w2_b".format(x) lokr_w1 = None if lokr_w1_name in lora.keys(): lokr_w1 = lora[lokr_w1_name] loaded_keys.add(lokr_w1_name) lokr_w2 = None if lokr_w2_name in lora.keys(): lokr_w2 = lora[lokr_w2_name] loaded_keys.add(lokr_w2_name) lokr_w1_a = None if lokr_w1_a_name in lora.keys(): lokr_w1_a = lora[lokr_w1_a_name] loaded_keys.add(lokr_w1_a_name) lokr_w1_b = None if lokr_w1_b_name in lora.keys(): lokr_w1_b = lora[lokr_w1_b_name] loaded_keys.add(lokr_w1_b_name) lokr_w2_a = None if lokr_w2_a_name in lora.keys(): lokr_w2_a = lora[lokr_w2_a_name] loaded_keys.add(lokr_w2_a_name) lokr_w2_b = None if lokr_w2_b_name in lora.keys(): lokr_w2_b = lora[lokr_w2_b_name] loaded_keys.add(lokr_w2_b_name) lokr_t2 = None if lokr_t2_name in 
lora.keys(): lokr_t2 = lora[lokr_t2_name] loaded_keys.add(lokr_t2_name) if (lokr_w1 is not None) or (lokr_w2 is not None) or (lokr_w1_a is not None) or (lokr_w2_a is not None): patch_dict[to_load[x]] = ("lokr", (lokr_w1, lokr_w2, alpha, lokr_w1_a, lokr_w1_b, lokr_w2_a, lokr_w2_b, lokr_t2, dora_scale)) a1_name = "{}.a1.weight".format(x) a2_name = "{}.a2.weight".format(x) b1_name = "{}.b1.weight".format(x) b2_name = "{}.b2.weight".format(x) if a1_name in lora: patch_dict[to_load[x]] = ("glora", (lora[a1_name], lora[a2_name], lora[b1_name], lora[b2_name], alpha, dora_scale)) loaded_keys.add(a1_name) loaded_keys.add(a2_name) loaded_keys.add(b1_name) loaded_keys.add(b2_name) w_norm_name = "{}.w_norm".format(x) b_norm_name = "{}.b_norm".format(x) w_norm = lora.get(w_norm_name, None) b_norm = lora.get(b_norm_name, None) if w_norm is not None: loaded_keys.add(w_norm_name) patch_dict[to_load[x]] = ("diff", (w_norm,)) if b_norm is not None: loaded_keys.add(b_norm_name) patch_dict["{}.bias".format(to_load[x][:-len(".weight")])] = ("diff", (b_norm,)) diff_name = "{}.diff".format(x) diff_weight = lora.get(diff_name, None) if diff_weight is not None: patch_dict[to_load[x]] = ("diff", (diff_weight,)) loaded_keys.add(diff_name) diff_bias_name = "{}.diff_b".format(x) diff_bias = lora.get(diff_bias_name, None) if diff_bias is not None: patch_dict["{}.bias".format(to_load[x][:-len(".weight")])] = ("diff", (diff_bias,)) loaded_keys.add(diff_bias_name) for x in lora.keys(): if x not in loaded_keys: logging.warning("lora key not loaded: {}".format(x)) return patch_dict def model_lora_keys_clip(model, key_map={}): sdk = model.state_dict().keys() text_model_lora_key = "lora_te_text_model_encoder_layers_{}_{}" clip_l_present = False for b in range(32): for c in LORA_CLIP_MAP: k = "clip_h.transformer.text_model.encoder.layers.{}.{}.weight".format(b, c) if k in sdk: lora_key = text_model_lora_key.format(b, LORA_CLIP_MAP[c]) key_map[lora_key] = k lora_key = "lora_te1_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) key_map[lora_key] = k lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) key_map[lora_key] = k k = "clip_l.transformer.text_model.encoder.layers.{}.{}.weight".format(b, c) if k in sdk: lora_key = text_model_lora_key.format(b, LORA_CLIP_MAP[c]) key_map[lora_key] = k lora_key = "lora_te1_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) key_map[lora_key] = k clip_l_present = True lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) key_map[lora_key] = k k = "clip_g.transformer.text_model.encoder.layers.{}.{}.weight".format(b, c) if k in sdk: if clip_l_present: lora_key = "lora_te2_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) key_map[lora_key] = k lora_key = "text_encoder_2.text_model.encoder.layers.{}.{}".format(b, c) key_map[lora_key] = k else: lora_key = "lora_te_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) key_map[lora_key] = k lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) key_map[lora_key] = k lora_key = "lora_prior_te_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) key_map[lora_key] = k for k in sdk: if k.startswith("t5xxl.transformer.") and k.endswith(".weight"): l_key = k[len("t5xxl.transformer."):-len(".weight")] lora_key = "lora_te3_{}".format(l_key.replace(".", "_")) key_map[lora_key] = k k = "clip_g.transformer.text_projection.weight" if k in sdk: key_map["lora_prior_te_text_projection"] = k key_map["lora_te2_text_projection"] = k k = 
"clip_l.transformer.text_projection.weight" if k in sdk: key_map["lora_te1_text_projection"] = k return key_map def model_lora_keys_unet(model, key_map={}): sd = model.state_dict() sdk = sd.keys() for k in sdk: if k.startswith("diffusion_model.") and k.endswith(".weight"): key_lora = k[len("diffusion_model."):-len(".weight")].replace(".", "_") key_map["lora_unet_{}".format(key_lora)] = k key_map["lora_prior_unet_{}".format(key_lora)] = k diffusers_keys = comfy.utils.unet_to_diffusers(model.model_config.unet_config) for k in diffusers_keys: if k.endswith(".weight"): unet_key = "diffusion_model.{}".format(diffusers_keys[k]) key_lora = k[:-len(".weight")].replace(".", "_") key_map["lora_unet_{}".format(key_lora)] = unet_key diffusers_lora_prefix = ["", "unet."] for p in diffusers_lora_prefix: diffusers_lora_key = "{}{}".format(p, k[:-len(".weight")].replace(".to_", ".processor.to_")) if diffusers_lora_key.endswith(".to_out.0"): diffusers_lora_key = diffusers_lora_key[:-2] key_map[diffusers_lora_key] = unet_key if isinstance(model, comfy.model_base.SD3): diffusers_keys = comfy.utils.mmdit_to_diffusers(model.model_config.unet_config, output_prefix="diffusion_model.") for k in diffusers_keys: if k.endswith(".weight"): to = diffusers_keys[k] key_lora = "transformer.{}".format(k[:-len(".weight")]) key_map[key_lora] = to key_lora = "base_model.model.{}".format(k[:-len(".weight")]) key_map[key_lora] = to key_lora = "lora_transformer_{}".format(k[:-len(".weight")].replace(".", "_")) key_map[key_lora] = to return key_map
import torch import logging from comfy.ldm.modules.diffusionmodules.openaimodel import UNetModel, Timestep from comfy.ldm.cascade.stage_c import StageC from comfy.ldm.cascade.stage_b import StageB from comfy.ldm.modules.encoders.noise_aug_modules import CLIPEmbeddingNoiseAugmentation from comfy.ldm.modules.diffusionmodules.upscaling import ImageConcatWithNoiseAugmentation from comfy.ldm.modules.diffusionmodules.mmdit import OpenAISignatureMMDITWrapper import comfy.ldm.audio.dit import comfy.ldm.audio.embedders import comfy.model_management import comfy.conds import comfy.ops from enum import Enum from . import utils import comfy.latent_formats import math class ModelType(Enum): EPS = 1 V_PREDICTION = 2 V_PREDICTION_EDM = 3 STABLE_CASCADE = 4 EDM = 5 FLOW = 6 V_PREDICTION_CONTINUOUS = 7 from comfy.model_sampling import EPS, V_PREDICTION, EDM, ModelSamplingDiscrete, ModelSamplingContinuousEDM, StableCascadeSampling, ModelSamplingContinuousV def model_sampling(model_config, model_type): s = ModelSamplingDiscrete if model_type == ModelType.EPS: c = EPS elif model_type == ModelType.V_PREDICTION: c = V_PREDICTION elif model_type == ModelType.V_PREDICTION_EDM: c = V_PREDICTION s = ModelSamplingContinuousEDM elif model_type == ModelType.FLOW: c = comfy.model_sampling.CONST s = comfy.model_sampling.ModelSamplingDiscreteFlow elif model_type == ModelType.STABLE_CASCADE: c = EPS s = StableCascadeSampling elif model_type == ModelType.EDM: c = EDM s = ModelSamplingContinuousEDM elif model_type == ModelType.V_PREDICTION_CONTINUOUS: c = V_PREDICTION s = ModelSamplingContinuousV class ModelSampling(s, c): pass return ModelSampling(model_config) class BaseModel(torch.nn.Module): def __init__(self, model_config, model_type=ModelType.EPS, device=None, unet_model=UNetModel): super().__init__() unet_config = model_config.unet_config self.latent_format = model_config.latent_format self.model_config = model_config self.manual_cast_dtype = model_config.manual_cast_dtype if not unet_config.get("disable_unet_model_creation", False): if self.manual_cast_dtype is not None: operations = comfy.ops.manual_cast else: operations = comfy.ops.disable_weight_init self.diffusion_model = unet_model(**unet_config, device=device, operations=operations) if comfy.model_management.force_channels_last(): self.diffusion_model.to(memory_format=torch.channels_last) logging.debug("using channels last mode for diffusion model") self.model_type = model_type self.model_sampling = model_sampling(model_config, model_type) self.adm_channels = unet_config.get("adm_in_channels", None) if self.adm_channels is None: self.adm_channels = 0 self.concat_keys = () logging.info("model_type {}".format(model_type.name)) logging.debug("adm {}".format(self.adm_channels)) def apply_model(self, x, t, c_concat=None, c_crossattn=None, control=None, transformer_options={}, **kwargs): sigma = t xc = self.model_sampling.calculate_input(sigma, x) if c_concat is not None: xc = torch.cat([xc] + [c_concat], dim=1) context = c_crossattn dtype = self.get_dtype() if self.manual_cast_dtype is not None: dtype = self.manual_cast_dtype xc = xc.to(dtype) t = self.model_sampling.timestep(t).float() context = context.to(dtype) extra_conds = {} for o in kwargs: extra = kwargs[o] if hasattr(extra, "dtype"): if extra.dtype != torch.int and extra.dtype != torch.long: extra = extra.to(dtype) extra_conds[o] = extra model_output = self.diffusion_model(xc, t, context=context, control=control, transformer_options=transformer_options, **extra_conds).float() return 
self.model_sampling.calculate_denoised(sigma, model_output, x) def get_dtype(self): return self.diffusion_model.dtype def is_adm(self): return self.adm_channels > 0 def encode_adm(self, **kwargs): return None def extra_conds(self, **kwargs): out = {} if len(self.concat_keys) > 0: cond_concat = [] denoise_mask = kwargs.get("concat_mask", kwargs.get("denoise_mask", None)) concat_latent_image = kwargs.get("concat_latent_image", None) if concat_latent_image is None: concat_latent_image = kwargs.get("latent_image", None) else: concat_latent_image = self.process_latent_in(concat_latent_image) noise = kwargs.get("noise", None) device = kwargs["device"] if concat_latent_image.shape[1:] != noise.shape[1:]: concat_latent_image = utils.common_upscale(concat_latent_image, noise.shape[-1], noise.shape[-2], "bilinear", "center") concat_latent_image = utils.resize_to_batch_size(concat_latent_image, noise.shape[0]) if denoise_mask is not None: if len(denoise_mask.shape) == len(noise.shape): denoise_mask = denoise_mask[:,:1] denoise_mask = denoise_mask.reshape((-1, 1, denoise_mask.shape[-2], denoise_mask.shape[-1])) if denoise_mask.shape[-2:] != noise.shape[-2:]: denoise_mask = utils.common_upscale(denoise_mask, noise.shape[-1], noise.shape[-2], "bilinear", "center") denoise_mask = utils.resize_to_batch_size(denoise_mask.round(), noise.shape[0]) for ck in self.concat_keys: if denoise_mask is not None: if ck == "mask": cond_concat.append(denoise_mask.to(device)) elif ck == "masked_image": cond_concat.append(concat_latent_image.to(device)) else: if ck == "mask": cond_concat.append(torch.ones_like(noise)[:,:1]) elif ck == "masked_image": cond_concat.append(self.blank_inpaint_image_like(noise)) data = torch.cat(cond_concat, dim=1) out['c_concat'] = comfy.conds.CONDNoiseShape(data) adm = self.encode_adm(**kwargs) if adm is not None: out['y'] = comfy.conds.CONDRegular(adm) cross_attn = kwargs.get("cross_attn", None) if cross_attn is not None: out['c_crossattn'] = comfy.conds.CONDCrossAttn(cross_attn) cross_attn_cnet = kwargs.get("cross_attn_controlnet", None) if cross_attn_cnet is not None: out['crossattn_controlnet'] = comfy.conds.CONDCrossAttn(cross_attn_cnet) c_concat = kwargs.get("noise_concat", None) if c_concat is not None: out['c_concat'] = comfy.conds.CONDNoiseShape(c_concat) return out def load_model_weights(self, sd, unet_prefix=""): to_load = {} keys = list(sd.keys()) for k in keys: if k.startswith(unet_prefix): to_load[k[len(unet_prefix):]] = sd.pop(k) to_load = self.model_config.process_unet_state_dict(to_load) m, u = self.diffusion_model.load_state_dict(to_load, strict=False) if len(m) > 0: logging.warning("unet missing: {}".format(m)) if len(u) > 0: logging.warning("unet unexpected: {}".format(u)) del to_load return self def process_latent_in(self, latent): return self.latent_format.process_in(latent) def process_latent_out(self, latent): return self.latent_format.process_out(latent) def state_dict_for_saving(self, clip_state_dict=None, vae_state_dict=None, clip_vision_state_dict=None): extra_sds = [] if clip_state_dict is not None: extra_sds.append(self.model_config.process_clip_state_dict_for_saving(clip_state_dict)) if vae_state_dict is not None: extra_sds.append(self.model_config.process_vae_state_dict_for_saving(vae_state_dict)) if clip_vision_state_dict is not None: extra_sds.append(self.model_config.process_clip_vision_state_dict_for_saving(clip_vision_state_dict)) unet_state_dict = self.diffusion_model.state_dict() unet_state_dict = 
self.model_config.process_unet_state_dict_for_saving(unet_state_dict) if self.model_type == ModelType.V_PREDICTION: unet_state_dict["v_pred"] = torch.tensor([]) for sd in extra_sds: unet_state_dict.update(sd) return unet_state_dict def set_inpaint(self): self.concat_keys = ("mask", "masked_image") def blank_inpaint_image_like(latent_image): blank_image = torch.ones_like(latent_image) blank_image[:,0] *= 0.8223 blank_image[:,1] *= -0.6876 blank_image[:,2] *= 0.6364 blank_image[:,3] *= 0.1380 return blank_image self.blank_inpaint_image_like = blank_inpaint_image_like def memory_required(self, input_shape): if comfy.model_management.xformers_enabled() or comfy.model_management.pytorch_attention_flash_attention(): dtype = self.get_dtype() if self.manual_cast_dtype is not None: dtype = self.manual_cast_dtype area = input_shape[0] * math.prod(input_shape[2:]) return (area * comfy.model_management.dtype_size(dtype) / 50) * (1024 * 1024) else: area = input_shape[0] * math.prod(input_shape[2:]) return (((area * 0.6) / 0.9) + 1024) * (1024 * 1024) def unclip_adm(unclip_conditioning, device, noise_augmentor, noise_augment_merge=0.0, seed=None): adm_inputs = [] weights = [] noise_aug = [] for unclip_cond in unclip_conditioning: for adm_cond in unclip_cond["clip_vision_output"].image_embeds: weight = unclip_cond["strength"] noise_augment = unclip_cond["noise_augmentation"] noise_level = round((noise_augmentor.max_noise_level - 1) * noise_augment) c_adm, noise_level_emb = noise_augmentor(adm_cond.to(device), noise_level=torch.tensor([noise_level], device=device), seed=seed) adm_out = torch.cat((c_adm, noise_level_emb), 1) * weight weights.append(weight) noise_aug.append(noise_augment) adm_inputs.append(adm_out) if len(noise_aug) > 1: adm_out = torch.stack(adm_inputs).sum(0) noise_augment = noise_augment_merge noise_level = round((noise_augmentor.max_noise_level - 1) * noise_augment) c_adm, noise_level_emb = noise_augmentor(adm_out[:, :noise_augmentor.time_embed.dim], noise_level=torch.tensor([noise_level], device=device)) adm_out = torch.cat((c_adm, noise_level_emb), 1) return adm_out class SD21UNCLIP(BaseModel): def __init__(self, model_config, noise_aug_config, model_type=ModelType.V_PREDICTION, device=None): super().__init__(model_config, model_type, device=device) self.noise_augmentor = CLIPEmbeddingNoiseAugmentation(**noise_aug_config) def encode_adm(self, **kwargs): unclip_conditioning = kwargs.get("unclip_conditioning", None) device = kwargs["device"] if unclip_conditioning is None: return torch.zeros((1, self.adm_channels)) else: return unclip_adm(unclip_conditioning, device, self.noise_augmentor, kwargs.get("unclip_noise_augment_merge", 0.05), kwargs.get("seed", 0) - 10) def sdxl_pooled(args, noise_augmentor): if "unclip_conditioning" in args: return unclip_adm(args.get("unclip_conditioning", None), args["device"], noise_augmentor, seed=args.get("seed", 0) - 10)[:,:1280] else: return args["pooled_output"] class SDXLRefiner(BaseModel): def __init__(self, model_config, model_type=ModelType.EPS, device=None): super().__init__(model_config, model_type, device=device) self.embedder = Timestep(256) self.noise_augmentor = CLIPEmbeddingNoiseAugmentation(**{"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 1280}) def encode_adm(self, **kwargs): clip_pooled = sdxl_pooled(kwargs, self.noise_augmentor) width = kwargs.get("width", 768) height = kwargs.get("height", 768) crop_w = kwargs.get("crop_w", 0) crop_h = kwargs.get("crop_h", 0) if 
kwargs.get("prompt_type", "") == "negative": aesthetic_score = kwargs.get("aesthetic_score", 2.5) else: aesthetic_score = kwargs.get("aesthetic_score", 6) out = [] out.append(self.embedder(torch.Tensor([height]))) out.append(self.embedder(torch.Tensor([width]))) out.append(self.embedder(torch.Tensor([crop_h]))) out.append(self.embedder(torch.Tensor([crop_w]))) out.append(self.embedder(torch.Tensor([aesthetic_score]))) flat = torch.flatten(torch.cat(out)).unsqueeze(dim=0).repeat(clip_pooled.shape[0], 1) return torch.cat((clip_pooled.to(flat.device), flat), dim=1) class SDXL(BaseModel): def __init__(self, model_config, model_type=ModelType.EPS, device=None): super().__init__(model_config, model_type, device=device) self.embedder = Timestep(256) self.noise_augmentor = CLIPEmbeddingNoiseAugmentation(**{"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 1280}) def encode_adm(self, **kwargs): clip_pooled = sdxl_pooled(kwargs, self.noise_augmentor) width = kwargs.get("width", 768) height = kwargs.get("height", 768) crop_w = kwargs.get("crop_w", 0) crop_h = kwargs.get("crop_h", 0) target_width = kwargs.get("target_width", width) target_height = kwargs.get("target_height", height) out = [] out.append(self.embedder(torch.Tensor([height]))) out.append(self.embedder(torch.Tensor([width]))) out.append(self.embedder(torch.Tensor([crop_h]))) out.append(self.embedder(torch.Tensor([crop_w]))) out.append(self.embedder(torch.Tensor([target_height]))) out.append(self.embedder(torch.Tensor([target_width]))) flat = torch.flatten(torch.cat(out)).unsqueeze(dim=0).repeat(clip_pooled.shape[0], 1) return torch.cat((clip_pooled.to(flat.device), flat), dim=1) class SVD_img2vid(BaseModel): def __init__(self, model_config, model_type=ModelType.V_PREDICTION_EDM, device=None): super().__init__(model_config, model_type, device=device) self.embedder = Timestep(256) def encode_adm(self, **kwargs): fps_id = kwargs.get("fps", 6) - 1 motion_bucket_id = kwargs.get("motion_bucket_id", 127) augmentation = kwargs.get("augmentation_level", 0) out = [] out.append(self.embedder(torch.Tensor([fps_id]))) out.append(self.embedder(torch.Tensor([motion_bucket_id]))) out.append(self.embedder(torch.Tensor([augmentation]))) flat = torch.flatten(torch.cat(out)).unsqueeze(dim=0) return flat def extra_conds(self, **kwargs): out = {} adm = self.encode_adm(**kwargs) if adm is not None: out['y'] = comfy.conds.CONDRegular(adm) latent_image = kwargs.get("concat_latent_image", None) noise = kwargs.get("noise", None) device = kwargs["device"] if latent_image is None: latent_image = torch.zeros_like(noise) if latent_image.shape[1:] != noise.shape[1:]: latent_image = utils.common_upscale(latent_image, noise.shape[-1], noise.shape[-2], "bilinear", "center") latent_image = utils.resize_to_batch_size(latent_image, noise.shape[0]) out['c_concat'] = comfy.conds.CONDNoiseShape(latent_image) cross_attn = kwargs.get("cross_attn", None) if cross_attn is not None: out['c_crossattn'] = comfy.conds.CONDCrossAttn(cross_attn) if "time_conditioning" in kwargs: out["time_context"] = comfy.conds.CONDCrossAttn(kwargs["time_conditioning"]) out['num_video_frames'] = comfy.conds.CONDConstant(noise.shape[0]) return out class SV3D_u(SVD_img2vid): def encode_adm(self, **kwargs): augmentation = kwargs.get("augmentation_level", 0) out = [] out.append(self.embedder(torch.flatten(torch.Tensor([augmentation])))) flat = torch.flatten(torch.cat(out)).unsqueeze(dim=0) return flat class SV3D_p(SVD_img2vid): def __init__(self, 
model_config, model_type=ModelType.V_PREDICTION_EDM, device=None): super().__init__(model_config, model_type, device=device) self.embedder_512 = Timestep(512) def encode_adm(self, **kwargs): augmentation = kwargs.get("augmentation_level", 0) elevation = kwargs.get("elevation", 0) azimuth = kwargs.get("azimuth", 0) noise = kwargs.get("noise", None) out = [] out.append(self.embedder(torch.flatten(torch.Tensor([augmentation])))) out.append(self.embedder_512(torch.deg2rad(torch.fmod(torch.flatten(90 - torch.Tensor([elevation])), 360.0)))) out.append(self.embedder_512(torch.deg2rad(torch.fmod(torch.flatten(torch.Tensor([azimuth])), 360.0)))) out = list(map(lambda a: utils.resize_to_batch_size(a, noise.shape[0]), out)) return torch.cat(out, dim=1) class Stable_Zero123(BaseModel): def __init__(self, model_config, model_type=ModelType.EPS, device=None, cc_projection_weight=None, cc_projection_bias=None): super().__init__(model_config, model_type, device=device) self.cc_projection = comfy.ops.manual_cast.Linear(cc_projection_weight.shape[1], cc_projection_weight.shape[0], dtype=self.get_dtype(), device=device) self.cc_projection.weight.copy_(cc_projection_weight) self.cc_projection.bias.copy_(cc_projection_bias) def extra_conds(self, **kwargs): out = {} latent_image = kwargs.get("concat_latent_image", None) noise = kwargs.get("noise", None) if latent_image is None: latent_image = torch.zeros_like(noise) if latent_image.shape[1:] != noise.shape[1:]: latent_image = utils.common_upscale(latent_image, noise.shape[-1], noise.shape[-2], "bilinear", "center") latent_image = utils.resize_to_batch_size(latent_image, noise.shape[0]) out['c_concat'] = comfy.conds.CONDNoiseShape(latent_image) cross_attn = kwargs.get("cross_attn", None) if cross_attn is not None: if cross_attn.shape[-1] != 768: cross_attn = self.cc_projection(cross_attn) out['c_crossattn'] = comfy.conds.CONDCrossAttn(cross_attn) return out class SD_X4Upscaler(BaseModel): def __init__(self, model_config, model_type=ModelType.V_PREDICTION, device=None): super().__init__(model_config, model_type, device=device) self.noise_augmentor = ImageConcatWithNoiseAugmentation(noise_schedule_config={"linear_start": 0.0001, "linear_end": 0.02}, max_noise_level=350) def extra_conds(self, **kwargs): out = {} image = kwargs.get("concat_image", None) noise = kwargs.get("noise", None) noise_augment = kwargs.get("noise_augmentation", 0.0) device = kwargs["device"] seed = kwargs["seed"] - 10 noise_level = round((self.noise_augmentor.max_noise_level) * noise_augment) if image is None: image = torch.zeros_like(noise)[:,:3] if image.shape[1:] != noise.shape[1:]: image = utils.common_upscale(image.to(device), noise.shape[-1], noise.shape[-2], "bilinear", "center") noise_level = torch.tensor([noise_level], device=device) if noise_augment > 0: image, noise_level = self.noise_augmentor(image.to(device), noise_level=noise_level, seed=seed) image = utils.resize_to_batch_size(image, noise.shape[0]) out['c_concat'] = comfy.conds.CONDNoiseShape(image) out['y'] = comfy.conds.CONDRegular(noise_level) return out class IP2P: def extra_conds(self, **kwargs): out = {} image = kwargs.get("concat_latent_image", None) noise = kwargs.get("noise", None) device = kwargs["device"] if image is None: image = torch.zeros_like(noise) if image.shape[1:] != noise.shape[1:]: image = utils.common_upscale(image.to(device), noise.shape[-1], noise.shape[-2], "bilinear", "center") image = utils.resize_to_batch_size(image, noise.shape[0]) out['c_concat'] = 
comfy.conds.CONDNoiseShape(self.process_ip2p_image_in(image)) adm = self.encode_adm(**kwargs) if adm is not None: out['y'] = comfy.conds.CONDRegular(adm) return out class SD15_instructpix2pix(IP2P, BaseModel): def __init__(self, model_config, model_type=ModelType.EPS, device=None): super().__init__(model_config, model_type, device=device) self.process_ip2p_image_in = lambda image: image class SDXL_instructpix2pix(IP2P, SDXL): def __init__(self, model_config, model_type=ModelType.EPS, device=None): super().__init__(model_config, model_type, device=device) if model_type == ModelType.V_PREDICTION_EDM: self.process_ip2p_image_in = lambda image: comfy.latent_formats.SDXL().process_in(image) else: self.process_ip2p_image_in = lambda image: image class StableCascade_C(BaseModel): def __init__(self, model_config, model_type=ModelType.STABLE_CASCADE, device=None): super().__init__(model_config, model_type, device=device, unet_model=StageC) self.diffusion_model.eval().requires_grad_(False) def extra_conds(self, **kwargs): out = {} clip_text_pooled = kwargs["pooled_output"] if clip_text_pooled is not None: out['clip_text_pooled'] = comfy.conds.CONDRegular(clip_text_pooled) if "unclip_conditioning" in kwargs: embeds = [] for unclip_cond in kwargs["unclip_conditioning"]: weight = unclip_cond["strength"] embeds.append(unclip_cond["clip_vision_output"].image_embeds.unsqueeze(0) * weight) clip_img = torch.cat(embeds, dim=1) else: clip_img = torch.zeros((1, 1, 768)) out["clip_img"] = comfy.conds.CONDRegular(clip_img) out["sca"] = comfy.conds.CONDRegular(torch.zeros((1,))) out["crp"] = comfy.conds.CONDRegular(torch.zeros((1,))) cross_attn = kwargs.get("cross_attn", None) if cross_attn is not None: out['clip_text'] = comfy.conds.CONDCrossAttn(cross_attn) return out class StableCascade_B(BaseModel): def __init__(self, model_config, model_type=ModelType.STABLE_CASCADE, device=None): super().__init__(model_config, model_type, device=device, unet_model=StageB) self.diffusion_model.eval().requires_grad_(False) def extra_conds(self, **kwargs): out = {} noise = kwargs.get("noise", None) clip_text_pooled = kwargs["pooled_output"] if clip_text_pooled is not None: out['clip'] = comfy.conds.CONDRegular(clip_text_pooled) prior = kwargs.get("stable_cascade_prior", torch.zeros((1, 16, (noise.shape[2] * 4) out["effnet"] = comfy.conds.CONDRegular(prior) out["sca"] = comfy.conds.CONDRegular(torch.zeros((1,))) return out class SD3(BaseModel): def __init__(self, model_config, model_type=ModelType.FLOW, device=None): super().__init__(model_config, model_type, device=device, unet_model=OpenAISignatureMMDITWrapper) def encode_adm(self, **kwargs): return kwargs["pooled_output"] def extra_conds(self, **kwargs): out = super().extra_conds(**kwargs) cross_attn = kwargs.get("cross_attn", None) if cross_attn is not None: out['c_crossattn'] = comfy.conds.CONDRegular(cross_attn) return out def memory_required(self, input_shape): if comfy.model_management.xformers_enabled() or comfy.model_management.pytorch_attention_flash_attention(): dtype = self.get_dtype() if self.manual_cast_dtype is not None: dtype = self.manual_cast_dtype area = input_shape[0] * input_shape[2] * input_shape[3] return (area * comfy.model_management.dtype_size(dtype) * 0.012) * (1024 * 1024) else: area = input_shape[0] * input_shape[2] * input_shape[3] return (area * 0.3) * (1024 * 1024) class StableAudio1(BaseModel): def __init__(self, model_config, seconds_start_embedder_weights, seconds_total_embedder_weights, model_type=ModelType.V_PREDICTION_CONTINUOUS, 
device=None): super().__init__(model_config, model_type, device=device, unet_model=comfy.ldm.audio.dit.AudioDiffusionTransformer) self.seconds_start_embedder = comfy.ldm.audio.embedders.NumberConditioner(768, min_val=0, max_val=512) self.seconds_total_embedder = comfy.ldm.audio.embedders.NumberConditioner(768, min_val=0, max_val=512) self.seconds_start_embedder.load_state_dict(seconds_start_embedder_weights) self.seconds_total_embedder.load_state_dict(seconds_total_embedder_weights) def extra_conds(self, **kwargs): out = {} noise = kwargs.get("noise", None) device = kwargs["device"] seconds_start = kwargs.get("seconds_start", 0) seconds_total = kwargs.get("seconds_total", int(noise.shape[-1] / 21.53)) seconds_start_embed = self.seconds_start_embedder([seconds_start])[0].to(device) seconds_total_embed = self.seconds_total_embedder([seconds_total])[0].to(device) global_embed = torch.cat([seconds_start_embed, seconds_total_embed], dim=-1).reshape((1, -1)) out['global_embed'] = comfy.conds.CONDRegular(global_embed) cross_attn = kwargs.get("cross_attn", None) if cross_attn is not None: cross_attn = torch.cat([cross_attn.to(device), seconds_start_embed.repeat((cross_attn.shape[0], 1, 1)), seconds_total_embed.repeat((cross_attn.shape[0], 1, 1))], dim=1) out['c_crossattn'] = comfy.conds.CONDRegular(cross_attn) return out
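# Illustrative sketch (the helper name below is hypothetical, not part of the original file): every
# extra_conds() method above follows the same convention of wrapping each conditioning tensor in a
# comfy.conds object that handles batching/concatenation at sampling time.
import torch
import comfy.conds

def example_extra_conds(noise, pooled_output, cross_attn):
    out = {}
    out['y'] = comfy.conds.CONDRegular(pooled_output)            # plain vectors: pooled embeds, noise levels, adm
    out['c_crossattn'] = comfy.conds.CONDCrossAttn(cross_attn)   # token sequences consumed by cross attention
    out['c_concat'] = comfy.conds.CONDNoiseShape(torch.zeros_like(noise))  # latents concatenated channel-wise with the noise
    return out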
import comfy.supported_models import comfy.supported_models_base import comfy.utils import math import logging import torch def count_blocks(state_dict_keys, prefix_string): count = 0 while True: c = False for k in state_dict_keys: if k.startswith(prefix_string.format(count)): c = True break if c == False: break count += 1 return count def calculate_transformer_depth(prefix, state_dict_keys, state_dict): context_dim = None use_linear_in_transformer = False transformer_prefix = prefix + "1.transformer_blocks." transformer_keys = sorted(list(filter(lambda a: a.startswith(transformer_prefix), state_dict_keys))) if len(transformer_keys) > 0: last_transformer_depth = count_blocks(state_dict_keys, transformer_prefix + '{}') context_dim = state_dict['{}0.attn2.to_k.weight'.format(transformer_prefix)].shape[1] use_linear_in_transformer = len(state_dict['{}1.proj_in.weight'.format(prefix)].shape) == 2 time_stack = '{}1.time_stack.0.attn1.to_q.weight'.format(prefix) in state_dict or '{}1.time_mix_blocks.0.attn1.to_q.weight'.format(prefix) in state_dict time_stack_cross = '{}1.time_stack.0.attn2.to_q.weight'.format(prefix) in state_dict or '{}1.time_mix_blocks.0.attn2.to_q.weight'.format(prefix) in state_dict return last_transformer_depth, context_dim, use_linear_in_transformer, time_stack, time_stack_cross return None def detect_unet_config(state_dict, key_prefix): state_dict_keys = list(state_dict.keys()) if '{}joint_blocks.0.context_block.attn.qkv.weight'.format(key_prefix) in state_dict_keys: unet_config = {} unet_config["in_channels"] = state_dict['{}x_embedder.proj.weight'.format(key_prefix)].shape[1] patch_size = state_dict['{}x_embedder.proj.weight'.format(key_prefix)].shape[2] unet_config["patch_size"] = patch_size unet_config["out_channels"] = state_dict['{}final_layer.linear.weight'.format(key_prefix)].shape[0] unet_config["depth"] = state_dict['{}x_embedder.proj.weight'.format(key_prefix)].shape[0] unet_config["input_size"] = None y_key = '{}y_embedder.mlp.0.weight'.format(key_prefix) if y_key in state_dict_keys: unet_config["adm_in_channels"] = state_dict[y_key].shape[1] context_key = '{}context_embedder.weight'.format(key_prefix) if context_key in state_dict_keys: in_features = state_dict[context_key].shape[1] out_features = state_dict[context_key].shape[0] unet_config["context_embedder_config"] = {"target": "torch.nn.Linear", "params": {"in_features": in_features, "out_features": out_features}} num_patches_key = '{}pos_embed'.format(key_prefix) if num_patches_key in state_dict_keys: num_patches = state_dict[num_patches_key].shape[1] unet_config["num_patches"] = num_patches unet_config["pos_embed_max_size"] = round(math.sqrt(num_patches)) rms_qk = '{}joint_blocks.0.context_block.attn.ln_q.weight'.format(key_prefix) if rms_qk in state_dict_keys: unet_config["qk_norm"] = "rms" unet_config["pos_embed_scaling_factor"] = None context_processor = '{}context_processor.layers.0.attn.qkv.weight'.format(key_prefix) if context_processor in state_dict_keys: unet_config["context_processor_layers"] = count_blocks(state_dict_keys, '{}context_processor.layers.'.format(key_prefix) + '{}.') return unet_config if '{}clf.1.weight'.format(key_prefix) in state_dict_keys: unet_config = {} text_mapper_name = '{}clip_txt_mapper.weight'.format(key_prefix) if text_mapper_name in state_dict_keys: unet_config['stable_cascade_stage'] = 'c' w = state_dict[text_mapper_name] if w.shape[0] == 1536: unet_config['c_cond'] = 1536 unet_config['c_hidden'] = [1536, 1536] unet_config['nhead'] = [24, 24] unet_config['blocks'] = 
[[4, 12], [12, 4]] elif w.shape[0] == 2048: unet_config['c_cond'] = 2048 elif '{}clip_mapper.weight'.format(key_prefix) in state_dict_keys: unet_config['stable_cascade_stage'] = 'b' w = state_dict['{}down_blocks.1.0.channelwise.0.weight'.format(key_prefix)] if w.shape[-1] == 640: unet_config['c_hidden'] = [320, 640, 1280, 1280] unet_config['nhead'] = [-1, -1, 20, 20] unet_config['blocks'] = [[2, 6, 28, 6], [6, 28, 6, 2]] unet_config['block_repeat'] = [[1, 1, 1, 1], [3, 3, 2, 2]] elif w.shape[-1] == 576: unet_config['c_hidden'] = [320, 576, 1152, 1152] unet_config['nhead'] = [-1, 9, 18, 18] unet_config['blocks'] = [[2, 4, 14, 4], [4, 14, 4, 2]] unet_config['block_repeat'] = [[1, 1, 1, 1], [2, 2, 2, 2]] return unet_config if '{}transformer.rotary_pos_emb.inv_freq'.format(key_prefix) in state_dict_keys: unet_config = {} unet_config["audio_model"] = "dit1.0" return unet_config unet_config = { "use_checkpoint": False, "image_size": 32, "use_spatial_transformer": True, "legacy": False } y_input = '{}label_emb.0.0.weight'.format(key_prefix) if y_input in state_dict_keys: unet_config["num_classes"] = "sequential" unet_config["adm_in_channels"] = state_dict[y_input].shape[1] else: unet_config["adm_in_channels"] = None model_channels = state_dict['{}input_blocks.0.0.weight'.format(key_prefix)].shape[0] in_channels = state_dict['{}input_blocks.0.0.weight'.format(key_prefix)].shape[1] out_key = '{}out.2.weight'.format(key_prefix) if out_key in state_dict: out_channels = state_dict[out_key].shape[0] else: out_channels = 4 num_res_blocks = [] channel_mult = [] attention_resolutions = [] transformer_depth = [] transformer_depth_output = [] context_dim = None use_linear_in_transformer = False video_model = False video_model_cross = False current_res = 1 count = 0 last_res_blocks = 0 last_channel_mult = 0 input_block_count = count_blocks(state_dict_keys, '{}input_blocks'.format(key_prefix) + '.{}.') for count in range(input_block_count): prefix = '{}input_blocks.{}.'.format(key_prefix, count) prefix_output = '{}output_blocks.{}.'.format(key_prefix, input_block_count - count - 1) block_keys = sorted(list(filter(lambda a: a.startswith(prefix), state_dict_keys))) if len(block_keys) == 0: break block_keys_output = sorted(list(filter(lambda a: a.startswith(prefix_output), state_dict_keys))) if "{}0.op.weight".format(prefix) in block_keys: num_res_blocks.append(last_res_blocks) channel_mult.append(last_channel_mult) current_res *= 2 last_res_blocks = 0 last_channel_mult = 0 out = calculate_transformer_depth(prefix_output, state_dict_keys, state_dict) if out is not None: transformer_depth_output.append(out[0]) else: transformer_depth_output.append(0) else: res_block_prefix = "{}0.in_layers.0.weight".format(prefix) if res_block_prefix in block_keys: last_res_blocks += 1 last_channel_mult = state_dict["{}0.out_layers.3.weight".format(prefix)].shape[0] out = calculate_transformer_depth(prefix, state_dict_keys, state_dict) if out is not None: transformer_depth.append(out[0]) if context_dim is None: context_dim = out[1] use_linear_in_transformer = out[2] video_model = out[3] video_model_cross = out[4] else: transformer_depth.append(0) res_block_prefix = "{}0.in_layers.0.weight".format(prefix_output) if res_block_prefix in block_keys_output: out = calculate_transformer_depth(prefix_output, state_dict_keys, state_dict) if out is not None: transformer_depth_output.append(out[0]) else: transformer_depth_output.append(0) num_res_blocks.append(last_res_blocks) channel_mult.append(last_channel_mult) if 
"{}middle_block.1.proj_in.weight".format(key_prefix) in state_dict_keys: transformer_depth_middle = count_blocks(state_dict_keys, '{}middle_block.1.transformer_blocks.'.format(key_prefix) + '{}') elif "{}middle_block.0.in_layers.0.weight".format(key_prefix) in state_dict_keys: transformer_depth_middle = -1 else: transformer_depth_middle = -2 unet_config["in_channels"] = in_channels unet_config["out_channels"] = out_channels unet_config["model_channels"] = model_channels unet_config["num_res_blocks"] = num_res_blocks unet_config["transformer_depth"] = transformer_depth unet_config["transformer_depth_output"] = transformer_depth_output unet_config["channel_mult"] = channel_mult unet_config["transformer_depth_middle"] = transformer_depth_middle unet_config['use_linear_in_transformer'] = use_linear_in_transformer unet_config["context_dim"] = context_dim if video_model: unet_config["extra_ff_mix_layer"] = True unet_config["use_spatial_context"] = True unet_config["merge_strategy"] = "learned_with_images" unet_config["merge_factor"] = 0.0 unet_config["video_kernel_size"] = [3, 1, 1] unet_config["use_temporal_resblock"] = True unet_config["use_temporal_attention"] = True unet_config["disable_temporal_crossattention"] = not video_model_cross else: unet_config["use_temporal_resblock"] = False unet_config["use_temporal_attention"] = False return unet_config def model_config_from_unet_config(unet_config, state_dict=None): for model_config in comfy.supported_models.models: if model_config.matches(unet_config, state_dict): return model_config(unet_config) logging.error("no match {}".format(unet_config)) return None def model_config_from_unet(state_dict, unet_key_prefix, use_base_if_no_match=False): unet_config = detect_unet_config(state_dict, unet_key_prefix) model_config = model_config_from_unet_config(unet_config, state_dict) if model_config is None and use_base_if_no_match: return comfy.supported_models_base.BASE(unet_config) else: return model_config def unet_prefix_from_state_dict(state_dict): if "model.model.postprocess_conv.weight" in state_dict: unet_key_prefix = "model.model." else: unet_key_prefix = "model.diffusion_model." 
return unet_key_prefix def convert_config(unet_config): new_config = unet_config.copy() num_res_blocks = new_config.get("num_res_blocks", None) channel_mult = new_config.get("channel_mult", None) if isinstance(num_res_blocks, int): num_res_blocks = len(channel_mult) * [num_res_blocks] if "attention_resolutions" in new_config: attention_resolutions = new_config.pop("attention_resolutions") transformer_depth = new_config.get("transformer_depth", None) transformer_depth_middle = new_config.get("transformer_depth_middle", None) if isinstance(transformer_depth, int): transformer_depth = len(channel_mult) * [transformer_depth] if transformer_depth_middle is None: transformer_depth_middle = transformer_depth[-1] t_in = [] t_out = [] s = 1 for i in range(len(num_res_blocks)): res = num_res_blocks[i] d = 0 if s in attention_resolutions: d = transformer_depth[i] t_in += [d] * res t_out += [d] * (res + 1) s *= 2 transformer_depth = t_in transformer_depth_output = t_out new_config["transformer_depth"] = t_in new_config["transformer_depth_output"] = t_out new_config["transformer_depth_middle"] = transformer_depth_middle new_config["num_res_blocks"] = num_res_blocks return new_config def unet_config_from_diffusers_unet(state_dict, dtype=None): match = {} transformer_depth = [] attn_res = 1 down_blocks = count_blocks(state_dict, "down_blocks.{}") for i in range(down_blocks): attn_blocks = count_blocks(state_dict, "down_blocks.{}.attentions.".format(i) + '{}') res_blocks = count_blocks(state_dict, "down_blocks.{}.resnets.".format(i) + '{}') for ab in range(attn_blocks): transformer_count = count_blocks(state_dict, "down_blocks.{}.attentions.{}.transformer_blocks.".format(i, ab) + '{}') transformer_depth.append(transformer_count) if transformer_count > 0: match["context_dim"] = state_dict["down_blocks.{}.attentions.{}.transformer_blocks.0.attn2.to_k.weight".format(i, ab)].shape[1] attn_res *= 2 if attn_blocks == 0: for i in range(res_blocks): transformer_depth.append(0) match["transformer_depth"] = transformer_depth match["model_channels"] = state_dict["conv_in.weight"].shape[0] match["in_channels"] = state_dict["conv_in.weight"].shape[1] match["adm_in_channels"] = None if "class_embedding.linear_1.weight" in state_dict: match["adm_in_channels"] = state_dict["class_embedding.linear_1.weight"].shape[1] elif "add_embedding.linear_1.weight" in state_dict: match["adm_in_channels"] = state_dict["add_embedding.linear_1.weight"].shape[1] SDXL = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], 'use_temporal_attention': False, 'use_temporal_resblock': False} SDXL_refiner = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'num_classes': 'sequential', 'adm_in_channels': 2560, 'dtype': dtype, 'in_channels': 4, 'model_channels': 384, 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [0, 0, 4, 4, 4, 4, 0, 0], 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 4, 'use_linear_in_transformer': True, 'context_dim': 1280, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 4, 4, 4, 4, 4, 4, 0, 0, 0], 
'use_temporal_attention': False, 'use_temporal_resblock': False} SD21 = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'adm_in_channels': None, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, 'use_linear_in_transformer': True, 'context_dim': 1024, 'num_head_channels': 64, 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], 'use_temporal_attention': False, 'use_temporal_resblock': False} SD21_uncliph = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'num_classes': 'sequential', 'adm_in_channels': 2048, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, 'use_linear_in_transformer': True, 'context_dim': 1024, 'num_head_channels': 64, 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], 'use_temporal_attention': False, 'use_temporal_resblock': False} SD21_unclipl = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'num_classes': 'sequential', 'adm_in_channels': 1536, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, 'use_linear_in_transformer': True, 'context_dim': 1024, 'num_head_channels': 64, 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], 'use_temporal_attention': False, 'use_temporal_resblock': False} SD15 = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'adm_in_channels': None, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, 'use_linear_in_transformer': False, 'context_dim': 768, 'num_heads': 8, 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], 'use_temporal_attention': False, 'use_temporal_resblock': False} SDXL_mid_cnet = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 0, 0, 1, 1], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 1, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 0, 0, 0, 1, 1, 1], 'use_temporal_attention': False, 'use_temporal_resblock': False} SDXL_small_cnet = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 0, 0, 0, 0], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 0, 'use_linear_in_transformer': True, 'num_head_channels': 64, 'context_dim': 1, 'transformer_depth_output': [0, 0, 0, 0, 0, 0, 0, 0, 0], 'use_temporal_attention': False, 'use_temporal_resblock': False} SDXL_diffusers_inpaint = {'use_checkpoint': False, 'image_size': 
32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 9, 'model_channels': 320, 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], 'use_temporal_attention': False, 'use_temporal_resblock': False} SDXL_diffusers_ip2p = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 8, 'model_channels': 320, 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], 'use_temporal_attention': False, 'use_temporal_resblock': False} SSD_1B = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 4, 4], 'transformer_depth_output': [0, 0, 0, 1, 1, 2, 10, 4, 4], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': -1, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'use_temporal_attention': False, 'use_temporal_resblock': False} Segmind_Vega = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 1, 1, 2, 2], 'transformer_depth_output': [0, 0, 0, 1, 1, 1, 2, 2, 2], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': -1, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'use_temporal_attention': False, 'use_temporal_resblock': False} KOALA_700M = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [1, 1, 1], 'transformer_depth': [0, 2, 5], 'transformer_depth_output': [0, 0, 2, 2, 5, 5], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': -2, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'use_temporal_attention': False, 'use_temporal_resblock': False} KOALA_1B = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [1, 1, 1], 'transformer_depth': [0, 2, 6], 'transformer_depth_output': [0, 0, 2, 2, 6, 6], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 6, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'use_temporal_attention': False, 'use_temporal_resblock': False} SD09_XS = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'adm_in_channels': None, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 
'num_res_blocks': [1, 1, 1], 'transformer_depth': [1, 1, 1], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': -2, 'use_linear_in_transformer': True, 'context_dim': 1024, 'num_head_channels': 64, 'transformer_depth_output': [1, 1, 1, 1, 1, 1], 'use_temporal_attention': False, 'use_temporal_resblock': False, 'disable_self_attentions': [True, False, False]} SD_XS = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'adm_in_channels': None, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [1, 1, 1], 'transformer_depth': [0, 1, 1], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': -2, 'use_linear_in_transformer': False, 'context_dim': 768, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 1, 1, 1, 1], 'use_temporal_attention': False, 'use_temporal_resblock': False} supported_models = [SDXL, SDXL_refiner, SD21, SD15, SD21_uncliph, SD21_unclipl, SDXL_mid_cnet, SDXL_small_cnet, SDXL_diffusers_inpaint, SSD_1B, Segmind_Vega, KOALA_700M, KOALA_1B, SD09_XS, SD_XS, SDXL_diffusers_ip2p] for unet_config in supported_models: matches = True for k in match: if match[k] != unet_config[k]: matches = False break if matches: return convert_config(unet_config) return None def model_config_from_diffusers_unet(state_dict): unet_config = unet_config_from_diffusers_unet(state_dict) if unet_config is not None: return model_config_from_unet_config(unet_config) return None def convert_diffusers_mmdit(state_dict, output_prefix=""): depth = count_blocks(state_dict, 'transformer_blocks.{}.') if depth > 0: out_sd = {} sd_map = comfy.utils.mmdit_to_diffusers({"depth": depth}, output_prefix=output_prefix) for k in sd_map: weight = state_dict.get(k, None) if weight is not None: t = sd_map[k] if not isinstance(t, str): if len(t) > 2: fun = t[2] else: fun = lambda a: a offset = t[1] if offset is not None: old_weight = out_sd.get(t[0], None) if old_weight is None: old_weight = torch.empty_like(weight) old_weight = old_weight.repeat([3] + [1] * (len(old_weight.shape) - 1)) w = old_weight.narrow(offset[0], offset[1], offset[2]) else: old_weight = weight w = weight w[:] = fun(weight) t = t[0] out_sd[t] = old_weight else: out_sd[t] = weight state_dict.pop(k) return out_sd
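# Illustrative sketch (the toy key list is hypothetical): count_blocks() above walks numbered key
# prefixes and counts consecutive indices 0, 1, 2, ... until one is missing.
example_keys = [
    "input_blocks.0.0.weight",
    "input_blocks.1.0.weight",
    "input_blocks.2.0.weight",
    "middle_block.0.weight",
]
assert count_blocks(example_keys, "input_blocks.{}.") == 3
assert count_blocks(example_keys, "output_blocks.{}.") == 0
# convert_config() then expands a legacy LDM config into the per-block lists used above, e.g.
# num_res_blocks=2, channel_mult=[1, 2, 4, 4], attention_resolutions=[4, 2, 1], transformer_depth=1
# becomes transformer_depth=[1, 1, 1, 1, 1, 1, 0, 0] and
# transformer_depth_output=[1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], matching the SD15 dict above.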
import psutil import logging from enum import Enum from comfy.cli_args import args import torch import sys import platform class VRAMState(Enum): DISABLED = 0 NO_VRAM = 1 LOW_VRAM = 2 NORMAL_VRAM = 3 HIGH_VRAM = 4 SHARED = 5 class CPUState(Enum): GPU = 0 CPU = 1 MPS = 2 vram_state = VRAMState.NORMAL_VRAM set_vram_to = VRAMState.NORMAL_VRAM cpu_state = CPUState.GPU total_vram = 0 lowvram_available = True xpu_available = False if args.deterministic: logging.info("Using deterministic algorithms for pytorch") torch.use_deterministic_algorithms(True, warn_only=True) directml_enabled = False if args.directml is not None: import torch_directml directml_enabled = True device_index = args.directml if device_index < 0: directml_device = torch_directml.device() else: directml_device = torch_directml.device(device_index) logging.info("Using directml with device: {}".format(torch_directml.device_name(device_index))) lowvram_available = False try: import intel_extension_for_pytorch as ipex if torch.xpu.is_available(): xpu_available = True except: pass try: if torch.backends.mps.is_available(): cpu_state = CPUState.MPS import torch.mps except: pass if args.cpu: cpu_state = CPUState.CPU def is_intel_xpu(): global cpu_state global xpu_available if cpu_state == CPUState.GPU: if xpu_available: return True return False def get_torch_device(): global directml_enabled global cpu_state if directml_enabled: global directml_device return directml_device if cpu_state == CPUState.MPS: return torch.device("mps") if cpu_state == CPUState.CPU: return torch.device("cpu") else: if is_intel_xpu(): return torch.device("xpu", torch.xpu.current_device()) else: return torch.device(torch.cuda.current_device()) def get_total_memory(dev=None, torch_total_too=False): global directml_enabled if dev is None: dev = get_torch_device() if hasattr(dev, 'type') and (dev.type == 'cpu' or dev.type == 'mps'): mem_total = psutil.virtual_memory().total mem_total_torch = mem_total else: if directml_enabled: mem_total = 1024 * 1024 * 1024 mem_total_torch = mem_total elif is_intel_xpu(): stats = torch.xpu.memory_stats(dev) mem_reserved = stats['reserved_bytes.all.current'] mem_total_torch = mem_reserved mem_total = torch.xpu.get_device_properties(dev).total_memory else: stats = torch.cuda.memory_stats(dev) mem_reserved = stats['reserved_bytes.all.current'] _, mem_total_cuda = torch.cuda.mem_get_info(dev) mem_total_torch = mem_reserved mem_total = mem_total_cuda if torch_total_too: return (mem_total, mem_total_torch) else: return mem_total total_vram = get_total_memory(get_torch_device()) / (1024 * 1024) total_ram = psutil.virtual_memory().total / (1024 * 1024) logging.info("Total VRAM {:0.0f} MB, total RAM {:0.0f} MB".format(total_vram, total_ram)) try: logging.info("pytorch version: {}".format(torch.version.__version__)) except: pass try: OOM_EXCEPTION = torch.cuda.OutOfMemoryError except: OOM_EXCEPTION = Exception XFORMERS_VERSION = "" XFORMERS_ENABLED_VAE = True if args.disable_xformers: XFORMERS_IS_AVAILABLE = False else: try: import xformers import xformers.ops XFORMERS_IS_AVAILABLE = True try: XFORMERS_IS_AVAILABLE = xformers._has_cpp_library except: pass try: XFORMERS_VERSION = xformers.version.__version__ logging.info("xformers version: {}".format(XFORMERS_VERSION)) if XFORMERS_VERSION.startswith("0.0.18"): logging.warning("\nWARNING: This version of xformers has a major bug where you will get black images when generating high resolution images.") logging.warning("Please downgrade or upgrade xformers to a different version.\n") 
XFORMERS_ENABLED_VAE = False except: pass except: XFORMERS_IS_AVAILABLE = False def is_nvidia(): global cpu_state if cpu_state == CPUState.GPU: if torch.version.cuda: return True return False ENABLE_PYTORCH_ATTENTION = False if args.use_pytorch_cross_attention: ENABLE_PYTORCH_ATTENTION = True XFORMERS_IS_AVAILABLE = False VAE_DTYPES = [torch.float32] try: if is_nvidia(): torch_version = torch.version.__version__ if int(torch_version[0]) >= 2: if ENABLE_PYTORCH_ATTENTION == False and args.use_split_cross_attention == False and args.use_quad_cross_attention == False: ENABLE_PYTORCH_ATTENTION = True if torch.cuda.is_bf16_supported() and torch.cuda.get_device_properties(torch.cuda.current_device()).major >= 8: VAE_DTYPES = [torch.bfloat16] + VAE_DTYPES if is_intel_xpu(): if args.use_split_cross_attention == False and args.use_quad_cross_attention == False: ENABLE_PYTORCH_ATTENTION = True except: pass if is_intel_xpu(): VAE_DTYPES = [torch.bfloat16] + VAE_DTYPES if args.cpu_vae: VAE_DTYPES = [torch.float32] if ENABLE_PYTORCH_ATTENTION: torch.backends.cuda.enable_math_sdp(True) torch.backends.cuda.enable_flash_sdp(True) torch.backends.cuda.enable_mem_efficient_sdp(True) if args.lowvram: set_vram_to = VRAMState.LOW_VRAM lowvram_available = True elif args.novram: set_vram_to = VRAMState.NO_VRAM elif args.highvram or args.gpu_only: vram_state = VRAMState.HIGH_VRAM FORCE_FP32 = False FORCE_FP16 = False if args.force_fp32: logging.info("Forcing FP32, if this improves things please report it.") FORCE_FP32 = True if args.force_fp16: logging.info("Forcing FP16.") FORCE_FP16 = True if lowvram_available: if set_vram_to in (VRAMState.LOW_VRAM, VRAMState.NO_VRAM): vram_state = set_vram_to if cpu_state != CPUState.GPU: vram_state = VRAMState.DISABLED if cpu_state == CPUState.MPS: vram_state = VRAMState.SHARED logging.info(f"Set vram state to: {vram_state.name}") DISABLE_SMART_MEMORY = args.disable_smart_memory if DISABLE_SMART_MEMORY: logging.info("Disabling smart memory management") def get_torch_device_name(device): if hasattr(device, 'type'): if device.type == "cuda": try: allocator_backend = torch.cuda.get_allocator_backend() except: allocator_backend = "" return "{} {} : {}".format(device, torch.cuda.get_device_name(device), allocator_backend) else: return "{}".format(device.type) elif is_intel_xpu(): return "{} {}".format(device, torch.xpu.get_device_name(device)) else: return "CUDA {}: {}".format(device, torch.cuda.get_device_name(device)) try: logging.info("Device: {}".format(get_torch_device_name(get_torch_device()))) except: logging.warning("Could not pick default device.") current_loaded_models = [] def module_size(module): module_mem = 0 sd = module.state_dict() for k in sd: t = sd[k] module_mem += t.nelement() * t.element_size() return module_mem class LoadedModel: def __init__(self, model): self.model = model self.device = model.load_device self.weights_loaded = False self.real_model = None self.currently_used = True def model_memory(self): return self.model.model_size() def model_memory_required(self, device): if device == self.model.current_device: return 0 else: return self.model_memory() def model_load(self, lowvram_model_memory=0, force_patch_weights=False): patch_model_to = self.device self.model.model_patches_to(self.device) self.model.model_patches_to(self.model.model_dtype()) load_weights = not self.weights_loaded try: if lowvram_model_memory > 0 and load_weights: self.real_model = self.model.patch_model_lowvram(device_to=patch_model_to, lowvram_model_memory=lowvram_model_memory, 
force_patch_weights=force_patch_weights) else: self.real_model = self.model.patch_model(device_to=patch_model_to, patch_weights=load_weights) except Exception as e: self.model.unpatch_model(self.model.offload_device) self.model_unload() raise e if is_intel_xpu() and not args.disable_ipex_optimize: self.real_model = ipex.optimize(self.real_model.eval(), graph_mode=True, concat_linear=True) self.weights_loaded = True return self.real_model def should_reload_model(self, force_patch_weights=False): if force_patch_weights and self.model.lowvram_patch_counter > 0: return True return False def model_unload(self, unpatch_weights=True): self.model.unpatch_model(self.model.offload_device, unpatch_weights=unpatch_weights) self.model.model_patches_to(self.model.offload_device) self.weights_loaded = self.weights_loaded and not unpatch_weights self.real_model = None def __eq__(self, other): return self.model is other.model def minimum_inference_memory(): return (1024 * 1024 * 1024) def unload_model_clones(model, unload_weights_only=True, force_unload=True): to_unload = [] for i in range(len(current_loaded_models)): if model.is_clone(current_loaded_models[i].model): to_unload = [i] + to_unload if len(to_unload) == 0: return True same_weights = 0 for i in to_unload: if model.clone_has_same_weights(current_loaded_models[i].model): same_weights += 1 if same_weights == len(to_unload): unload_weight = False else: unload_weight = True if not force_unload: if unload_weights_only and unload_weight == False: return None for i in to_unload: logging.debug("unload clone {} {}".format(i, unload_weight)) current_loaded_models.pop(i).model_unload(unpatch_weights=unload_weight) return unload_weight def free_memory(memory_required, device, keep_loaded=[]): unloaded_model = [] can_unload = [] for i in range(len(current_loaded_models) -1, -1, -1): shift_model = current_loaded_models[i] if shift_model.device == device: if shift_model not in keep_loaded: can_unload.append((sys.getrefcount(shift_model.model), shift_model.model_memory(), i)) shift_model.currently_used = False for x in sorted(can_unload): i = x[-1] if not DISABLE_SMART_MEMORY: if get_free_memory(device) > memory_required: break current_loaded_models[i].model_unload() unloaded_model.append(i) for i in sorted(unloaded_model, reverse=True): current_loaded_models.pop(i) if len(unloaded_model) > 0: soft_empty_cache() else: if vram_state != VRAMState.HIGH_VRAM: mem_free_total, mem_free_torch = get_free_memory(device, torch_free_too=True) if mem_free_torch > mem_free_total * 0.25: soft_empty_cache() def load_models_gpu(models, memory_required=0, force_patch_weights=False): global vram_state inference_memory = minimum_inference_memory() extra_mem = max(inference_memory, memory_required) models = set(models) models_to_load = [] models_already_loaded = [] for x in models: loaded_model = LoadedModel(x) loaded = None try: loaded_model_index = current_loaded_models.index(loaded_model) except: loaded_model_index = None if loaded_model_index is not None: loaded = current_loaded_models[loaded_model_index] if loaded.should_reload_model(force_patch_weights=force_patch_weights): current_loaded_models.pop(loaded_model_index).model_unload(unpatch_weights=True) loaded = None else: loaded.currently_used = True models_already_loaded.append(loaded) if loaded is None: if hasattr(x, "model"): logging.info(f"Requested to load {x.model.__class__.__name__}") models_to_load.append(loaded_model) if len(models_to_load) == 0: devs = set(map(lambda a: a.device, models_already_loaded)) for d in 
devs: if d != torch.device("cpu"): free_memory(extra_mem, d, models_already_loaded) return logging.info(f"Loading {len(models_to_load)} new model{'s' if len(models_to_load) > 1 else ''}") total_memory_required = {} for loaded_model in models_to_load: if unload_model_clones(loaded_model.model, unload_weights_only=True, force_unload=False) == True: total_memory_required[loaded_model.device] = total_memory_required.get(loaded_model.device, 0) + loaded_model.model_memory_required(loaded_model.device) for device in total_memory_required: if device != torch.device("cpu"): free_memory(total_memory_required[device] * 1.3 + extra_mem, device, models_already_loaded) for loaded_model in models_to_load: weights_unloaded = unload_model_clones(loaded_model.model, unload_weights_only=False, force_unload=False) if weights_unloaded is not None: loaded_model.weights_loaded = not weights_unloaded for loaded_model in models_to_load: model = loaded_model.model torch_dev = model.load_device if is_device_cpu(torch_dev): vram_set_state = VRAMState.DISABLED else: vram_set_state = vram_state lowvram_model_memory = 0 if lowvram_available and (vram_set_state == VRAMState.LOW_VRAM or vram_set_state == VRAMState.NORMAL_VRAM): model_size = loaded_model.model_memory_required(torch_dev) current_free_mem = get_free_memory(torch_dev) lowvram_model_memory = int(max(64 * (1024 * 1024), (current_free_mem - 1024 * (1024 * 1024)) / 1.3 )) if model_size <= (current_free_mem - inference_memory): lowvram_model_memory = 0 if vram_set_state == VRAMState.NO_VRAM: lowvram_model_memory = 64 * 1024 * 1024 cur_loaded_model = loaded_model.model_load(lowvram_model_memory, force_patch_weights=force_patch_weights) current_loaded_models.insert(0, loaded_model) return def load_model_gpu(model): return load_models_gpu([model]) def loaded_models(only_currently_used=False): output = [] for m in current_loaded_models: if only_currently_used: if not m.currently_used: continue output.append(m.model) return output def cleanup_models(keep_clone_weights_loaded=False): to_delete = [] for i in range(len(current_loaded_models)): if sys.getrefcount(current_loaded_models[i].model) <= 2: if not keep_clone_weights_loaded: to_delete = [i] + to_delete elif sys.getrefcount(current_loaded_models[i].real_model) <= 3: to_delete = [i] + to_delete for i in to_delete: x = current_loaded_models.pop(i) x.model_unload() del x def dtype_size(dtype): dtype_size = 4 if dtype == torch.float16 or dtype == torch.bfloat16: dtype_size = 2 elif dtype == torch.float32: dtype_size = 4 else: try: dtype_size = dtype.itemsize except: pass return dtype_size def unet_offload_device(): if vram_state == VRAMState.HIGH_VRAM: return get_torch_device() else: return torch.device("cpu") def unet_inital_load_device(parameters, dtype): torch_dev = get_torch_device() if vram_state == VRAMState.HIGH_VRAM: return torch_dev cpu_dev = torch.device("cpu") if DISABLE_SMART_MEMORY: return cpu_dev model_size = dtype_size(dtype) * parameters mem_dev = get_free_memory(torch_dev) mem_cpu = get_free_memory(cpu_dev) if mem_dev > mem_cpu and model_size < mem_dev: return torch_dev else: return cpu_dev def unet_dtype(device=None, model_params=0, supported_dtypes=[torch.float16, torch.bfloat16, torch.float32]): if args.bf16_unet: return torch.bfloat16 if args.fp16_unet: return torch.float16 if args.fp8_e4m3fn_unet: return torch.float8_e4m3fn if args.fp8_e5m2_unet: return torch.float8_e5m2 if should_use_fp16(device=device, model_params=model_params, manual_cast=True): if torch.float16 in supported_dtypes: return 
torch.float16 if should_use_bf16(device, model_params=model_params, manual_cast=True): if torch.bfloat16 in supported_dtypes: return torch.bfloat16 return torch.float32 def unet_manual_cast(weight_dtype, inference_device, supported_dtypes=[torch.float16, torch.bfloat16, torch.float32]): if weight_dtype == torch.float32: return None fp16_supported = should_use_fp16(inference_device, prioritize_performance=False) if fp16_supported and weight_dtype == torch.float16: return None bf16_supported = should_use_bf16(inference_device) if bf16_supported and weight_dtype == torch.bfloat16: return None if fp16_supported and torch.float16 in supported_dtypes: return torch.float16 elif bf16_supported and torch.bfloat16 in supported_dtypes: return torch.bfloat16 else: return torch.float32 def text_encoder_offload_device(): if args.gpu_only: return get_torch_device() else: return torch.device("cpu") def text_encoder_device(): if args.gpu_only: return get_torch_device() elif vram_state == VRAMState.HIGH_VRAM or vram_state == VRAMState.NORMAL_VRAM: if should_use_fp16(prioritize_performance=False): return get_torch_device() else: return torch.device("cpu") else: return torch.device("cpu") def text_encoder_dtype(device=None): if args.fp8_e4m3fn_text_enc: return torch.float8_e4m3fn elif args.fp8_e5m2_text_enc: return torch.float8_e5m2 elif args.fp16_text_enc: return torch.float16 elif args.fp32_text_enc: return torch.float32 if is_device_cpu(device): return torch.float16 return torch.float16 def intermediate_device(): if args.gpu_only: return get_torch_device() else: return torch.device("cpu") def vae_device(): if args.cpu_vae: return torch.device("cpu") return get_torch_device() def vae_offload_device(): if args.gpu_only: return get_torch_device() else: return torch.device("cpu") def vae_dtype(device=None, allowed_dtypes=[]): global VAE_DTYPES if args.fp16_vae: return torch.float16 elif args.bf16_vae: return torch.bfloat16 elif args.fp32_vae: return torch.float32 for d in allowed_dtypes: if d == torch.float16 and should_use_fp16(device, prioritize_performance=False): return d if d in VAE_DTYPES: return d return VAE_DTYPES[0] def get_autocast_device(dev): if hasattr(dev, 'type'): return dev.type return "cuda" def supports_dtype(device, dtype): if dtype == torch.float32: return True if is_device_cpu(device): return False if dtype == torch.float16: return True if dtype == torch.bfloat16: return True return False def supports_cast(device, dtype): if dtype == torch.float32: return True if dtype == torch.float16: return True if is_device_mps(device): return False if directml_enabled: return False if dtype == torch.bfloat16: return True if dtype == torch.float8_e4m3fn: return True if dtype == torch.float8_e5m2: return True return False def device_supports_non_blocking(device): if is_device_mps(device): return False if is_intel_xpu(): return False if args.deterministic: return False if directml_enabled: return False return True def device_should_use_non_blocking(device): if not device_supports_non_blocking(device): return False return False def force_channels_last(): if args.force_channels_last: return True return False def cast_to_device(tensor, device, dtype, copy=False): device_supports_cast = False if tensor.dtype == torch.float32 or tensor.dtype == torch.float16: device_supports_cast = True elif tensor.dtype == torch.bfloat16: if hasattr(device, 'type') and device.type.startswith("cuda"): device_supports_cast = True elif is_intel_xpu(): device_supports_cast = True non_blocking = 
device_should_use_non_blocking(device) if device_supports_cast: if copy: if tensor.device == device: return tensor.to(dtype, copy=copy, non_blocking=non_blocking) return tensor.to(device, copy=copy, non_blocking=non_blocking).to(dtype, non_blocking=non_blocking) else: return tensor.to(device, non_blocking=non_blocking).to(dtype, non_blocking=non_blocking) else: return tensor.to(device, dtype, copy=copy, non_blocking=non_blocking) def xformers_enabled(): global directml_enabled global cpu_state if cpu_state != CPUState.GPU: return False if is_intel_xpu(): return False if directml_enabled: return False return XFORMERS_IS_AVAILABLE def xformers_enabled_vae(): enabled = xformers_enabled() if not enabled: return False return XFORMERS_ENABLED_VAE def pytorch_attention_enabled(): global ENABLE_PYTORCH_ATTENTION return ENABLE_PYTORCH_ATTENTION def pytorch_attention_flash_attention(): global ENABLE_PYTORCH_ATTENTION if ENABLE_PYTORCH_ATTENTION: if is_nvidia(): return True if is_intel_xpu(): return True return False def force_upcast_attention_dtype(): upcast = args.force_upcast_attention try: if platform.mac_ver()[0] in ['14.5']: upcast = True except: pass if upcast: return torch.float32 else: return None def get_free_memory(dev=None, torch_free_too=False): global directml_enabled if dev is None: dev = get_torch_device() if hasattr(dev, 'type') and (dev.type == 'cpu' or dev.type == 'mps'): mem_free_total = psutil.virtual_memory().available mem_free_torch = mem_free_total else: if directml_enabled: mem_free_total = 1024 * 1024 * 1024 mem_free_torch = mem_free_total elif is_intel_xpu(): stats = torch.xpu.memory_stats(dev) mem_active = stats['active_bytes.all.current'] mem_reserved = stats['reserved_bytes.all.current'] mem_free_torch = mem_reserved - mem_active mem_free_xpu = torch.xpu.get_device_properties(dev).total_memory - mem_reserved mem_free_total = mem_free_xpu + mem_free_torch else: stats = torch.cuda.memory_stats(dev) mem_active = stats['active_bytes.all.current'] mem_reserved = stats['reserved_bytes.all.current'] mem_free_cuda, _ = torch.cuda.mem_get_info(dev) mem_free_torch = mem_reserved - mem_active mem_free_total = mem_free_cuda + mem_free_torch if torch_free_too: return (mem_free_total, mem_free_torch) else: return mem_free_total def cpu_mode(): global cpu_state return cpu_state == CPUState.CPU def mps_mode(): global cpu_state return cpu_state == CPUState.MPS def is_device_type(device, type): if hasattr(device, 'type'): if (device.type == type): return True return False def is_device_cpu(device): return is_device_type(device, 'cpu') def is_device_mps(device): return is_device_type(device, 'mps') def is_device_cuda(device): return is_device_type(device, 'cuda') def should_use_fp16(device=None, model_params=0, prioritize_performance=True, manual_cast=False): global directml_enabled if device is not None: if is_device_cpu(device): return False if FORCE_FP16: return True if device is not None: if is_device_mps(device): return True if FORCE_FP32: return False if directml_enabled: return False if mps_mode(): return True if cpu_mode(): return False if is_intel_xpu(): return True if torch.version.hip: return True props = torch.cuda.get_device_properties("cuda") if props.major >= 8: return True if props.major < 6: return False fp16_works = False nvidia_10_series = ["1080", "1070", "titan x", "p3000", "p3200", "p4000", "p4200", "p5000", "p5200", "p6000", "1060", "1050", "p40", "p100", "p6", "p4"] for x in nvidia_10_series: if x in props.name.lower(): fp16_works = True if fp16_works or 
manual_cast: free_model_memory = (get_free_memory() * 0.9 - minimum_inference_memory()) if (not prioritize_performance) or model_params * 4 > free_model_memory: return True if props.major < 7: return False nvidia_16_series = ["1660", "1650", "1630", "T500", "T550", "T600", "MX550", "MX450", "CMP 30HX", "T2000", "T1000", "T1200"] for x in nvidia_16_series: if x in props.name: return False return True def should_use_bf16(device=None, model_params=0, prioritize_performance=True, manual_cast=False): if device is not None: if is_device_cpu(device): return False if device is not None: if is_device_mps(device): return False if FORCE_FP32: return False if directml_enabled: return False if cpu_mode() or mps_mode(): return False if is_intel_xpu(): return True if device is None: device = torch.device("cuda") props = torch.cuda.get_device_properties(device) if props.major >= 8: return True bf16_works = torch.cuda.is_bf16_supported() if bf16_works or manual_cast: free_model_memory = (get_free_memory() * 0.9 - minimum_inference_memory()) if (not prioritize_performance) or model_params * 4 > free_model_memory: return True return False def soft_empty_cache(force=False): global cpu_state if cpu_state == CPUState.MPS: torch.mps.empty_cache() elif is_intel_xpu(): torch.xpu.empty_cache() elif torch.cuda.is_available(): if force or is_nvidia(): torch.cuda.empty_cache() torch.cuda.ipc_collect() def unload_all_models(): free_memory(1e30, get_torch_device()) def resolve_lowvram_weight(weight, model, key): print("WARNING: The comfy.model_management.resolve_lowvram_weight function will be removed soon, please stop using it.") return weight import threading class InterruptProcessingException(Exception): pass interrupt_processing_mutex = threading.RLock() interrupt_processing = False def interrupt_current_processing(value=True): global interrupt_processing global interrupt_processing_mutex with interrupt_processing_mutex: interrupt_processing = value def processing_interrupted(): global interrupt_processing global interrupt_processing_mutex with interrupt_processing_mutex: return interrupt_processing def throw_exception_if_processing_interrupted(): global interrupt_processing global interrupt_processing_mutex with interrupt_processing_mutex: if interrupt_processing: interrupt_processing = False raise InterruptProcessingException()
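# Illustrative sketch (the function name and step count are hypothetical): how a long-running job
# cooperates with the interrupt flag defined above.
def run_example_steps(num_steps=20):
    for _ in range(num_steps):
        # Raises InterruptProcessingException (and clears the flag) if another thread has called
        # interrupt_current_processing(True) in the meantime.
        throw_exception_if_processing_interrupted()
        # ... perform one unit of work here ...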
import torch import copy import inspect import logging import uuid import comfy.utils import comfy.model_management from comfy.types import UnetWrapperFunction def weight_decompose(dora_scale, weight, lora_diff, alpha, strength): dora_scale = comfy.model_management.cast_to_device(dora_scale, weight.device, torch.float32) lora_diff *= alpha weight_calc = weight + lora_diff.type(weight.dtype) weight_norm = ( weight_calc.transpose(0, 1) .reshape(weight_calc.shape[1], -1) .norm(dim=1, keepdim=True) .reshape(weight_calc.shape[1], *[1] * (weight_calc.dim() - 1)) .transpose(0, 1) ) weight_calc *= (dora_scale / weight_norm).type(weight.dtype) if strength != 1.0: weight_calc -= weight weight += strength * (weight_calc) else: weight[:] = weight_calc return weight def set_model_options_patch_replace(model_options, patch, name, block_name, number, transformer_index=None): to = model_options["transformer_options"].copy() if "patches_replace" not in to: to["patches_replace"] = {} else: to["patches_replace"] = to["patches_replace"].copy() if name not in to["patches_replace"]: to["patches_replace"][name] = {} else: to["patches_replace"][name] = to["patches_replace"][name].copy() if transformer_index is not None: block = (block_name, number, transformer_index) else: block = (block_name, number) to["patches_replace"][name][block] = patch model_options["transformer_options"] = to return model_options class ModelPatcher: def __init__(self, model, load_device, offload_device, size=0, current_device=None, weight_inplace_update=False): self.size = size self.model = model self.patches = {} self.backup = {} self.object_patches = {} self.object_patches_backup = {} self.model_options = {"transformer_options":{}} self.model_size() self.load_device = load_device self.offload_device = offload_device if current_device is None: self.current_device = self.offload_device else: self.current_device = current_device self.weight_inplace_update = weight_inplace_update self.model_lowvram = False self.lowvram_patch_counter = 0 self.patches_uuid = uuid.uuid4() def model_size(self): if self.size > 0: return self.size self.size = comfy.model_management.module_size(self.model) return self.size def clone(self): n = ModelPatcher(self.model, self.load_device, self.offload_device, self.size, self.current_device, weight_inplace_update=self.weight_inplace_update) n.patches = {} for k in self.patches: n.patches[k] = self.patches[k][:] n.patches_uuid = self.patches_uuid n.object_patches = self.object_patches.copy() n.model_options = copy.deepcopy(self.model_options) n.backup = self.backup n.object_patches_backup = self.object_patches_backup return n def is_clone(self, other): if hasattr(other, 'model') and self.model is other.model: return True return False def clone_has_same_weights(self, clone): if not self.is_clone(clone): return False if len(self.patches) == 0 and len(clone.patches) == 0: return True if self.patches_uuid == clone.patches_uuid: if len(self.patches) != len(clone.patches): logging.warning("WARNING: something went wrong, same patch uuid but different length of patches.") else: return True def memory_required(self, input_shape): return self.model.memory_required(input_shape=input_shape) def set_model_sampler_cfg_function(self, sampler_cfg_function, disable_cfg1_optimization=False): if len(inspect.signature(sampler_cfg_function).parameters) == 3: self.model_options["sampler_cfg_function"] = lambda args: sampler_cfg_function(args["cond"], args["uncond"], args["cond_scale"]) else: self.model_options["sampler_cfg_function"] = 
sampler_cfg_function if disable_cfg1_optimization: self.model_options["disable_cfg1_optimization"] = True def set_model_sampler_post_cfg_function(self, post_cfg_function, disable_cfg1_optimization=False): self.model_options["sampler_post_cfg_function"] = self.model_options.get("sampler_post_cfg_function", []) + [post_cfg_function] if disable_cfg1_optimization: self.model_options["disable_cfg1_optimization"] = True def set_model_unet_function_wrapper(self, unet_wrapper_function: UnetWrapperFunction): self.model_options["model_function_wrapper"] = unet_wrapper_function def set_model_denoise_mask_function(self, denoise_mask_function): self.model_options["denoise_mask_function"] = denoise_mask_function def set_model_patch(self, patch, name): to = self.model_options["transformer_options"] if "patches" not in to: to["patches"] = {} to["patches"][name] = to["patches"].get(name, []) + [patch] def set_model_patch_replace(self, patch, name, block_name, number, transformer_index=None): self.model_options = set_model_options_patch_replace(self.model_options, patch, name, block_name, number, transformer_index=transformer_index) def set_model_attn1_patch(self, patch): self.set_model_patch(patch, "attn1_patch") def set_model_attn2_patch(self, patch): self.set_model_patch(patch, "attn2_patch") def set_model_attn1_replace(self, patch, block_name, number, transformer_index=None): self.set_model_patch_replace(patch, "attn1", block_name, number, transformer_index) def set_model_attn2_replace(self, patch, block_name, number, transformer_index=None): self.set_model_patch_replace(patch, "attn2", block_name, number, transformer_index) def set_model_attn1_output_patch(self, patch): self.set_model_patch(patch, "attn1_output_patch") def set_model_attn2_output_patch(self, patch): self.set_model_patch(patch, "attn2_output_patch") def set_model_input_block_patch(self, patch): self.set_model_patch(patch, "input_block_patch") def set_model_input_block_patch_after_skip(self, patch): self.set_model_patch(patch, "input_block_patch_after_skip") def set_model_output_block_patch(self, patch): self.set_model_patch(patch, "output_block_patch") def add_object_patch(self, name, obj): self.object_patches[name] = obj def get_model_object(self, name): if name in self.object_patches: return self.object_patches[name] else: if name in self.object_patches_backup: return self.object_patches_backup[name] else: return comfy.utils.get_attr(self.model, name) def model_patches_to(self, device): to = self.model_options["transformer_options"] if "patches" in to: patches = to["patches"] for name in patches: patch_list = patches[name] for i in range(len(patch_list)): if hasattr(patch_list[i], "to"): patch_list[i] = patch_list[i].to(device) if "patches_replace" in to: patches = to["patches_replace"] for name in patches: patch_list = patches[name] for k in patch_list: if hasattr(patch_list[k], "to"): patch_list[k] = patch_list[k].to(device) if "model_function_wrapper" in self.model_options: wrap_func = self.model_options["model_function_wrapper"] if hasattr(wrap_func, "to"): self.model_options["model_function_wrapper"] = wrap_func.to(device) def model_dtype(self): if hasattr(self.model, "get_dtype"): return self.model.get_dtype() def add_patches(self, patches, strength_patch=1.0, strength_model=1.0): p = set() model_sd = self.model.state_dict() for k in patches: offset = None function = None if isinstance(k, str): key = k else: offset = k[1] key = k[0] if len(k) > 2: function = k[2] if key in model_sd: p.add(k) current_patches = 
self.patches.get(key, []) current_patches.append((strength_patch, patches[k], strength_model, offset, function)) self.patches[key] = current_patches self.patches_uuid = uuid.uuid4() return list(p) def get_key_patches(self, filter_prefix=None): comfy.model_management.unload_model_clones(self) model_sd = self.model_state_dict() p = {} for k in model_sd: if filter_prefix is not None: if not k.startswith(filter_prefix): continue if k in self.patches: p[k] = [model_sd[k]] + self.patches[k] else: p[k] = (model_sd[k],) return p def model_state_dict(self, filter_prefix=None): sd = self.model.state_dict() keys = list(sd.keys()) if filter_prefix is not None: for k in keys: if not k.startswith(filter_prefix): sd.pop(k) return sd def patch_weight_to_device(self, key, device_to=None): if key not in self.patches: return weight = comfy.utils.get_attr(self.model, key) inplace_update = self.weight_inplace_update if key not in self.backup: self.backup[key] = weight.to(device=self.offload_device, copy=inplace_update) if device_to is not None: temp_weight = comfy.model_management.cast_to_device(weight, device_to, torch.float32, copy=True) else: temp_weight = weight.to(torch.float32, copy=True) out_weight = self.calculate_weight(self.patches[key], temp_weight, key).to(weight.dtype) if inplace_update: comfy.utils.copy_to_param(self.model, key, out_weight) else: comfy.utils.set_attr_param(self.model, key, out_weight) def patch_model(self, device_to=None, patch_weights=True): for k in self.object_patches: old = comfy.utils.set_attr(self.model, k, self.object_patches[k]) if k not in self.object_patches_backup: self.object_patches_backup[k] = old if patch_weights: model_sd = self.model_state_dict() for key in self.patches: if key not in model_sd: logging.warning("could not patch. 
key doesn't exist in model: {}".format(key)) continue self.patch_weight_to_device(key, device_to) if device_to is not None: self.model.to(device_to) self.current_device = device_to return self.model def patch_model_lowvram(self, device_to=None, lowvram_model_memory=0, force_patch_weights=False): self.patch_model(device_to, patch_weights=False) logging.info("loading in lowvram mode {}".format(lowvram_model_memory/(1024 * 1024))) class LowVramPatch: def __init__(self, key, model_patcher): self.key = key self.model_patcher = model_patcher def __call__(self, weight): return self.model_patcher.calculate_weight(self.model_patcher.patches[self.key], weight, self.key) mem_counter = 0 patch_counter = 0 for n, m in self.model.named_modules(): lowvram_weight = False if hasattr(m, "comfy_cast_weights"): module_mem = comfy.model_management.module_size(m) if mem_counter + module_mem >= lowvram_model_memory: lowvram_weight = True weight_key = "{}.weight".format(n) bias_key = "{}.bias".format(n) if lowvram_weight: if weight_key in self.patches: if force_patch_weights: self.patch_weight_to_device(weight_key) else: m.weight_function = LowVramPatch(weight_key, self) patch_counter += 1 if bias_key in self.patches: if force_patch_weights: self.patch_weight_to_device(bias_key) else: m.bias_function = LowVramPatch(bias_key, self) patch_counter += 1 m.prev_comfy_cast_weights = m.comfy_cast_weights m.comfy_cast_weights = True else: if hasattr(m, "weight"): self.patch_weight_to_device(weight_key, device_to) self.patch_weight_to_device(bias_key, device_to) m.to(device_to) mem_counter += comfy.model_management.module_size(m) logging.debug("lowvram: loaded module regularly {} {}".format(n, m)) self.model_lowvram = True self.lowvram_patch_counter = patch_counter return self.model def calculate_weight(self, patches, weight, key): for p in patches: strength = p[0] v = p[1] strength_model = p[2] offset = p[3] function = p[4] if function is None: function = lambda a: a old_weight = None if offset is not None: old_weight = weight weight = weight.narrow(offset[0], offset[1], offset[2]) if strength_model != 1.0: weight *= strength_model if isinstance(v, list): v = (self.calculate_weight(v[1:], v[0].clone(), key), ) if len(v) == 1: patch_type = "diff" elif len(v) == 2: patch_type = v[0] v = v[1] if patch_type == "diff": w1 = v[0] if strength != 0.0: if w1.shape != weight.shape: logging.warning("WARNING SHAPE MISMATCH {} WEIGHT NOT MERGED {} != {}".format(key, w1.shape, weight.shape)) else: weight += function(strength * comfy.model_management.cast_to_device(w1, weight.device, weight.dtype)) elif patch_type == "lora": mat1 = comfy.model_management.cast_to_device(v[0], weight.device, torch.float32) mat2 = comfy.model_management.cast_to_device(v[1], weight.device, torch.float32) dora_scale = v[4] if v[2] is not None: alpha = v[2] / mat2.shape[0] else: alpha = 1.0 if v[3] is not None: mat3 = comfy.model_management.cast_to_device(v[3], weight.device, torch.float32) final_shape = [mat2.shape[1], mat2.shape[0], mat3.shape[2], mat3.shape[3]] mat2 = torch.mm(mat2.transpose(0, 1).flatten(start_dim=1), mat3.transpose(0, 1).flatten(start_dim=1)).reshape(final_shape).transpose(0, 1) try: lora_diff = torch.mm(mat1.flatten(start_dim=1), mat2.flatten(start_dim=1)).reshape(weight.shape) if dora_scale is not None: weight = function(weight_decompose(dora_scale, weight, lora_diff, alpha, strength)) else: weight += function(((strength * alpha) * lora_diff).type(weight.dtype)) except Exception as e: logging.error("ERROR {} {} {}".format(patch_type, 
key, e)) elif patch_type == "lokr": w1 = v[0] w2 = v[1] w1_a = v[3] w1_b = v[4] w2_a = v[5] w2_b = v[6] t2 = v[7] dora_scale = v[8] dim = None if w1 is None: dim = w1_b.shape[0] w1 = torch.mm(comfy.model_management.cast_to_device(w1_a, weight.device, torch.float32), comfy.model_management.cast_to_device(w1_b, weight.device, torch.float32)) else: w1 = comfy.model_management.cast_to_device(w1, weight.device, torch.float32) if w2 is None: dim = w2_b.shape[0] if t2 is None: w2 = torch.mm(comfy.model_management.cast_to_device(w2_a, weight.device, torch.float32), comfy.model_management.cast_to_device(w2_b, weight.device, torch.float32)) else: w2 = torch.einsum('i j k l, j r, i p -> p r k l', comfy.model_management.cast_to_device(t2, weight.device, torch.float32), comfy.model_management.cast_to_device(w2_b, weight.device, torch.float32), comfy.model_management.cast_to_device(w2_a, weight.device, torch.float32)) else: w2 = comfy.model_management.cast_to_device(w2, weight.device, torch.float32) if len(w2.shape) == 4: w1 = w1.unsqueeze(2).unsqueeze(2) if v[2] is not None and dim is not None: alpha = v[2] / dim else: alpha = 1.0 try: lora_diff = torch.kron(w1, w2).reshape(weight.shape) if dora_scale is not None: weight = function(weight_decompose(dora_scale, weight, lora_diff, alpha, strength)) else: weight += function(((strength * alpha) * lora_diff).type(weight.dtype)) except Exception as e: logging.error("ERROR {} {} {}".format(patch_type, key, e)) elif patch_type == "loha": w1a = v[0] w1b = v[1] if v[2] is not None: alpha = v[2] / w1b.shape[0] else: alpha = 1.0 w2a = v[3] w2b = v[4] dora_scale = v[7] if v[5] is not None: t1 = v[5] t2 = v[6] m1 = torch.einsum('i j k l, j r, i p -> p r k l', comfy.model_management.cast_to_device(t1, weight.device, torch.float32), comfy.model_management.cast_to_device(w1b, weight.device, torch.float32), comfy.model_management.cast_to_device(w1a, weight.device, torch.float32)) m2 = torch.einsum('i j k l, j r, i p -> p r k l', comfy.model_management.cast_to_device(t2, weight.device, torch.float32), comfy.model_management.cast_to_device(w2b, weight.device, torch.float32), comfy.model_management.cast_to_device(w2a, weight.device, torch.float32)) else: m1 = torch.mm(comfy.model_management.cast_to_device(w1a, weight.device, torch.float32), comfy.model_management.cast_to_device(w1b, weight.device, torch.float32)) m2 = torch.mm(comfy.model_management.cast_to_device(w2a, weight.device, torch.float32), comfy.model_management.cast_to_device(w2b, weight.device, torch.float32)) try: lora_diff = (m1 * m2).reshape(weight.shape) if dora_scale is not None: weight = function(weight_decompose(dora_scale, weight, lora_diff, alpha, strength)) else: weight += function(((strength * alpha) * lora_diff).type(weight.dtype)) except Exception as e: logging.error("ERROR {} {} {}".format(patch_type, key, e)) elif patch_type == "glora": if v[4] is not None: alpha = v[4] / v[0].shape[0] else: alpha = 1.0 dora_scale = v[5] a1 = comfy.model_management.cast_to_device(v[0].flatten(start_dim=1), weight.device, torch.float32) a2 = comfy.model_management.cast_to_device(v[1].flatten(start_dim=1), weight.device, torch.float32) b1 = comfy.model_management.cast_to_device(v[2].flatten(start_dim=1), weight.device, torch.float32) b2 = comfy.model_management.cast_to_device(v[3].flatten(start_dim=1), weight.device, torch.float32) try: lora_diff = (torch.mm(b2, b1) + torch.mm(torch.mm(weight.flatten(start_dim=1), a2), a1)).reshape(weight.shape) if dora_scale is not None: weight = 
function(weight_decompose(dora_scale, weight, lora_diff, alpha, strength)) else: weight += function(((strength * alpha) * lora_diff).type(weight.dtype)) except Exception as e: logging.error("ERROR {} {} {}".format(patch_type, key, e)) else: logging.warning("patch type not recognized {} {}".format(patch_type, key)) if old_weight is not None: weight = old_weight return weight def unpatch_model(self, device_to=None, unpatch_weights=True): if unpatch_weights: if self.model_lowvram: for m in self.model.modules(): if hasattr(m, "prev_comfy_cast_weights"): m.comfy_cast_weights = m.prev_comfy_cast_weights del m.prev_comfy_cast_weights m.weight_function = None m.bias_function = None self.model_lowvram = False self.lowvram_patch_counter = 0 keys = list(self.backup.keys()) if self.weight_inplace_update: for k in keys: comfy.utils.copy_to_param(self.model, k, self.backup[k]) else: for k in keys: comfy.utils.set_attr_param(self.model, k, self.backup[k]) self.backup.clear() if device_to is not None: self.model.to(device_to) self.current_device = device_to keys = list(self.object_patches_backup.keys()) for k in keys: comfy.utils.set_attr(self.model, k, self.object_patches_backup[k]) self.object_patches_backup.clear()
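A minimal standalone sketch of the basic "lora" merge rule implemented in calculate_weight above, assuming plain 2D tensors and skipping the conv (mat3), DoRA, and dtype/device-casting details; the function and tensor names below are illustrative, not part of the ModelPatcher API:

import torch

def apply_lora_patch(weight, up, down, alpha=None, strength=1.0):
    # lora_diff = up @ down, optionally scaled by alpha / rank, then added with strength.
    rank = down.shape[0]
    scale = (alpha / rank) if alpha is not None else 1.0
    lora_diff = torch.mm(up.flatten(start_dim=1), down.flatten(start_dim=1)).reshape(weight.shape)
    return weight + (strength * scale) * lora_diff.to(weight.dtype)

w = torch.zeros(8, 8)
up = torch.randn(8, 2)    # plays the role of mat1 / v[0]
down = torch.randn(2, 8)  # plays the role of mat2 / v[1]
print(apply_lora_patch(w, up, down, alpha=2.0).shape)  # torch.Size([8, 8])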
import torch from comfy.ldm.modules.diffusionmodules.util import make_beta_schedule import math class EPS: def calculate_input(self, sigma, noise): sigma = sigma.view(sigma.shape[:1] + (1,) * (noise.ndim - 1)) return noise / (sigma ** 2 + self.sigma_data ** 2) ** 0.5 def calculate_denoised(self, sigma, model_output, model_input): sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) return model_input - model_output * sigma def noise_scaling(self, sigma, noise, latent_image, max_denoise=False): if max_denoise: noise = noise * torch.sqrt(1.0 + sigma ** 2.0) else: noise = noise * sigma noise += latent_image return noise def inverse_noise_scaling(self, sigma, latent): return latent class V_PREDICTION(EPS): def calculate_denoised(self, sigma, model_output, model_input): sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) return model_input * self.sigma_data ** 2 / (sigma ** 2 + self.sigma_data ** 2) - model_output * sigma * self.sigma_data / (sigma ** 2 + self.sigma_data ** 2) ** 0.5 class EDM(V_PREDICTION): def calculate_denoised(self, sigma, model_output, model_input): sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) return model_input * self.sigma_data ** 2 / (sigma ** 2 + self.sigma_data ** 2) + model_output * sigma * self.sigma_data / (sigma ** 2 + self.sigma_data ** 2) ** 0.5 class CONST: def calculate_input(self, sigma, noise): return noise def calculate_denoised(self, sigma, model_output, model_input): sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) return model_input - model_output * sigma def noise_scaling(self, sigma, noise, latent_image, max_denoise=False): return sigma * noise + (1.0 - sigma) * latent_image def inverse_noise_scaling(self, sigma, latent): return latent / (1.0 - sigma) class ModelSamplingDiscrete(torch.nn.Module): def __init__(self, model_config=None): super().__init__() if model_config is not None: sampling_settings = model_config.sampling_settings else: sampling_settings = {} beta_schedule = sampling_settings.get("beta_schedule", "linear") linear_start = sampling_settings.get("linear_start", 0.00085) linear_end = sampling_settings.get("linear_end", 0.012) self._register_schedule(given_betas=None, beta_schedule=beta_schedule, timesteps=1000, linear_start=linear_start, linear_end=linear_end, cosine_s=8e-3) self.sigma_data = 1.0 def _register_schedule(self, given_betas=None, beta_schedule="linear", timesteps=1000, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): if given_betas is not None: betas = given_betas else: betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end, cosine_s=cosine_s) alphas = 1. 
- betas alphas_cumprod = torch.cumprod(alphas, dim=0) timesteps, = betas.shape self.num_timesteps = int(timesteps) self.linear_start = linear_start self.linear_end = linear_end sigmas = ((1 - alphas_cumprod) / alphas_cumprod) ** 0.5 self.set_sigmas(sigmas) def set_sigmas(self, sigmas): self.register_buffer('sigmas', sigmas.float()) self.register_buffer('log_sigmas', sigmas.log().float()) @property def sigma_min(self): return self.sigmas[0] @property def sigma_max(self): return self.sigmas[-1] def timestep(self, sigma): log_sigma = sigma.log() dists = log_sigma.to(self.log_sigmas.device) - self.log_sigmas[:, None] return dists.abs().argmin(dim=0).view(sigma.shape).to(sigma.device) def sigma(self, timestep): t = torch.clamp(timestep.float().to(self.log_sigmas.device), min=0, max=(len(self.sigmas) - 1)) low_idx = t.floor().long() high_idx = t.ceil().long() w = t.frac() log_sigma = (1 - w) * self.log_sigmas[low_idx] + w * self.log_sigmas[high_idx] return log_sigma.exp().to(timestep.device) def percent_to_sigma(self, percent): if percent <= 0.0: return 999999999.9 if percent >= 1.0: return 0.0 percent = 1.0 - percent return self.sigma(torch.tensor(percent * 999.0)).item() class ModelSamplingDiscreteEDM(ModelSamplingDiscrete): def timestep(self, sigma): return 0.25 * sigma.log() def sigma(self, timestep): return (timestep / 0.25).exp() class ModelSamplingContinuousEDM(torch.nn.Module): def __init__(self, model_config=None): super().__init__() if model_config is not None: sampling_settings = model_config.sampling_settings else: sampling_settings = {} sigma_min = sampling_settings.get("sigma_min", 0.002) sigma_max = sampling_settings.get("sigma_max", 120.0) sigma_data = sampling_settings.get("sigma_data", 1.0) self.set_parameters(sigma_min, sigma_max, sigma_data) def set_parameters(self, sigma_min, sigma_max, sigma_data): self.sigma_data = sigma_data sigmas = torch.linspace(math.log(sigma_min), math.log(sigma_max), 1000).exp() self.register_buffer('sigmas', sigmas) self.register_buffer('log_sigmas', sigmas.log()) @property def sigma_min(self): return self.sigmas[0] @property def sigma_max(self): return self.sigmas[-1] def timestep(self, sigma): return 0.25 * sigma.log() def sigma(self, timestep): return (timestep / 0.25).exp() def percent_to_sigma(self, percent): if percent <= 0.0: return 999999999.9 if percent >= 1.0: return 0.0 percent = 1.0 - percent log_sigma_min = math.log(self.sigma_min) return math.exp((math.log(self.sigma_max) - log_sigma_min) * percent + log_sigma_min) class ModelSamplingContinuousV(ModelSamplingContinuousEDM): def timestep(self, sigma): return sigma.atan() / math.pi * 2 def sigma(self, timestep): return (timestep * math.pi / 2).tan() def time_snr_shift(alpha, t): if alpha == 1.0: return t return alpha * t / (1 + (alpha - 1) * t) class ModelSamplingDiscreteFlow(torch.nn.Module): def __init__(self, model_config=None): super().__init__() if model_config is not None: sampling_settings = model_config.sampling_settings else: sampling_settings = {} self.set_parameters(shift=sampling_settings.get("shift", 1.0)) def set_parameters(self, shift=1.0, timesteps=1000): self.shift = shift ts = self.sigma(torch.arange(1, timesteps + 1, 1)) self.register_buffer('sigmas', ts) @property def sigma_min(self): return self.sigmas[0] @property def sigma_max(self): return self.sigmas[-1] def timestep(self, sigma): return sigma * 1000 def sigma(self, timestep): return time_snr_shift(self.shift, timestep / 1000) def percent_to_sigma(self, percent): if percent <= 0.0: return 1.0 if percent >= 1.0: 
return 0.0 return 1.0 - percent class StableCascadeSampling(ModelSamplingDiscrete): def __init__(self, model_config=None): super().__init__() if model_config is not None: sampling_settings = model_config.sampling_settings else: sampling_settings = {} self.set_parameters(sampling_settings.get("shift", 1.0)) def set_parameters(self, shift=1.0, cosine_s=8e-3): self.shift = shift self.cosine_s = torch.tensor(cosine_s) self._init_alpha_cumprod = torch.cos(self.cosine_s / (1 + self.cosine_s) * torch.pi * 0.5) ** 2 self.num_timesteps = 10000 sigmas = torch.empty((self.num_timesteps), dtype=torch.float32) for x in range(self.num_timesteps): t = (x + 1) / self.num_timesteps sigmas[x] = self.sigma(t) self.set_sigmas(sigmas) def sigma(self, timestep): alpha_cumprod = (torch.cos((timestep + self.cosine_s) / (1 + self.cosine_s) * torch.pi * 0.5) ** 2 / self._init_alpha_cumprod) if self.shift != 1.0: var = alpha_cumprod logSNR = (var/(1-var)).log() logSNR += 2 * torch.log(1.0 / torch.tensor(self.shift)) alpha_cumprod = logSNR.sigmoid() alpha_cumprod = alpha_cumprod.clamp(0.0001, 0.9999) return ((1 - alpha_cumprod) / alpha_cumprod) ** 0.5 def timestep(self, sigma): var = 1 / ((sigma * sigma) + 1) var = var.clamp(0, 1.0) s, min_var = self.cosine_s.to(var.device), self._init_alpha_cumprod.to(var.device) t = (((var * min_var) ** 0.5).acos() / (torch.pi * 0.5)) * (1 + s) - s return t def percent_to_sigma(self, percent): if percent <= 0.0: return 999999999.9 if percent >= 1.0: return 0.0 percent = 1.0 - percent return self.sigma(torch.tensor(percent))
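A hedged sketch of how the discrete sigma table used by ModelSamplingDiscrete can be derived from a beta schedule. The sqrt-space linear interpolation below is an assumption standing in for make_beta_schedule("linear"); the sigma relation itself is the one used in _register_schedule above:

import torch

linear_start, linear_end, timesteps = 0.00085, 0.012, 1000
# Assumed stand-in for make_beta_schedule("linear", ...)
betas = torch.linspace(linear_start ** 0.5, linear_end ** 0.5, timesteps) ** 2
alphas_cumprod = torch.cumprod(1.0 - betas, dim=0)
# Same relation as _register_schedule: sigma = sqrt((1 - alphas_cumprod) / alphas_cumprod)
sigmas = ((1 - alphas_cumprod) / alphas_cumprod) ** 0.5

print(float(sigmas[0]), float(sigmas[-1]))  # roughly the schedule's sigma_min and sigma_max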
""" This file is part of ComfyUI. Copyright (C) 2024 Stability AI This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https: """ import torch import comfy.model_management def cast_bias_weight(s, input): bias = None non_blocking = comfy.model_management.device_should_use_non_blocking(input.device) if s.bias is not None: bias = s.bias.to(device=input.device, dtype=input.dtype, non_blocking=non_blocking) if s.bias_function is not None: bias = s.bias_function(bias) weight = s.weight.to(device=input.device, dtype=input.dtype, non_blocking=non_blocking) if s.weight_function is not None: weight = s.weight_function(weight) return weight, bias class CastWeightBiasOp: comfy_cast_weights = False weight_function = None bias_function = None class disable_weight_init: class Linear(torch.nn.Linear, CastWeightBiasOp): def reset_parameters(self): return None def forward_comfy_cast_weights(self, input): weight, bias = cast_bias_weight(self, input) return torch.nn.functional.linear(input, weight, bias) def forward(self, *args, **kwargs): if self.comfy_cast_weights: return self.forward_comfy_cast_weights(*args, **kwargs) else: return super().forward(*args, **kwargs) class Conv1d(torch.nn.Conv1d, CastWeightBiasOp): def reset_parameters(self): return None def forward_comfy_cast_weights(self, input): weight, bias = cast_bias_weight(self, input) return self._conv_forward(input, weight, bias) def forward(self, *args, **kwargs): if self.comfy_cast_weights: return self.forward_comfy_cast_weights(*args, **kwargs) else: return super().forward(*args, **kwargs) class Conv2d(torch.nn.Conv2d, CastWeightBiasOp): def reset_parameters(self): return None def forward_comfy_cast_weights(self, input): weight, bias = cast_bias_weight(self, input) return self._conv_forward(input, weight, bias) def forward(self, *args, **kwargs): if self.comfy_cast_weights: return self.forward_comfy_cast_weights(*args, **kwargs) else: return super().forward(*args, **kwargs) class Conv3d(torch.nn.Conv3d, CastWeightBiasOp): def reset_parameters(self): return None def forward_comfy_cast_weights(self, input): weight, bias = cast_bias_weight(self, input) return self._conv_forward(input, weight, bias) def forward(self, *args, **kwargs): if self.comfy_cast_weights: return self.forward_comfy_cast_weights(*args, **kwargs) else: return super().forward(*args, **kwargs) class GroupNorm(torch.nn.GroupNorm, CastWeightBiasOp): def reset_parameters(self): return None def forward_comfy_cast_weights(self, input): weight, bias = cast_bias_weight(self, input) return torch.nn.functional.group_norm(input, self.num_groups, weight, bias, self.eps) def forward(self, *args, **kwargs): if self.comfy_cast_weights: return self.forward_comfy_cast_weights(*args, **kwargs) else: return super().forward(*args, **kwargs) class LayerNorm(torch.nn.LayerNorm, CastWeightBiasOp): def reset_parameters(self): return None def forward_comfy_cast_weights(self, input): if self.weight is not None: weight, bias = cast_bias_weight(self, input) else: weight = None bias = None 
return torch.nn.functional.layer_norm(input, self.normalized_shape, weight, bias, self.eps) def forward(self, *args, **kwargs): if self.comfy_cast_weights: return self.forward_comfy_cast_weights(*args, **kwargs) else: return super().forward(*args, **kwargs) class ConvTranspose2d(torch.nn.ConvTranspose2d, CastWeightBiasOp): def reset_parameters(self): return None def forward_comfy_cast_weights(self, input, output_size=None): num_spatial_dims = 2 output_padding = self._output_padding( input, output_size, self.stride, self.padding, self.kernel_size, num_spatial_dims, self.dilation) weight, bias = cast_bias_weight(self, input) return torch.nn.functional.conv_transpose2d( input, weight, bias, self.stride, self.padding, output_padding, self.groups, self.dilation) def forward(self, *args, **kwargs): if self.comfy_cast_weights: return self.forward_comfy_cast_weights(*args, **kwargs) else: return super().forward(*args, **kwargs) class ConvTranspose1d(torch.nn.ConvTranspose1d, CastWeightBiasOp): def reset_parameters(self): return None def forward_comfy_cast_weights(self, input, output_size=None): num_spatial_dims = 1 output_padding = self._output_padding( input, output_size, self.stride, self.padding, self.kernel_size, num_spatial_dims, self.dilation) weight, bias = cast_bias_weight(self, input) return torch.nn.functional.conv_transpose1d( input, weight, bias, self.stride, self.padding, output_padding, self.groups, self.dilation) def forward(self, *args, **kwargs): if self.comfy_cast_weights: return self.forward_comfy_cast_weights(*args, **kwargs) else: return super().forward(*args, **kwargs) @classmethod def conv_nd(s, dims, *args, **kwargs): if dims == 2: return s.Conv2d(*args, **kwargs) elif dims == 3: return s.Conv3d(*args, **kwargs) else: raise ValueError(f"unsupported dimensions: {dims}") class manual_cast(disable_weight_init): class Linear(disable_weight_init.Linear): comfy_cast_weights = True class Conv1d(disable_weight_init.Conv1d): comfy_cast_weights = True class Conv2d(disable_weight_init.Conv2d): comfy_cast_weights = True class Conv3d(disable_weight_init.Conv3d): comfy_cast_weights = True class GroupNorm(disable_weight_init.GroupNorm): comfy_cast_weights = True class LayerNorm(disable_weight_init.LayerNorm): comfy_cast_weights = True class ConvTranspose2d(disable_weight_init.ConvTranspose2d): comfy_cast_weights = True class ConvTranspose1d(disable_weight_init.ConvTranspose1d): comfy_cast_weights = True
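The cast-at-forward-time pattern used by disable_weight_init / manual_cast can be illustrated with a simplified stand-in. The class below is hypothetical and omits the comfy_cast_weights toggle, the weight_function/bias_function hooks, and non-blocking transfers of the real ops:

import torch

class CastLinear(torch.nn.Linear):
    # Parameters stay in their stored dtype/device; each forward casts a copy to match the input.
    def forward(self, x):
        weight = self.weight.to(device=x.device, dtype=x.dtype)
        bias = self.bias.to(device=x.device, dtype=x.dtype) if self.bias is not None else None
        return torch.nn.functional.linear(x, weight, bias)

layer = CastLinear(4, 4).to(torch.float16)  # storage dtype: fp16
out = layer(torch.randn(2, 4))              # fp32 activations
print(out.dtype)  # torch.float32; the stored parameters were only cast per call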
args_parsing = False

def enable_args_parsing(enable=True):
    global args_parsing
    args_parsing = enable
import torch import comfy.model_management import comfy.samplers import comfy.utils import numpy as np import logging def prepare_noise(latent_image, seed, noise_inds=None): """ creates random noise given a latent image and a seed. optional arg skip can be used to skip and discard x number of noise generations for a given seed """ generator = torch.manual_seed(seed) if noise_inds is None: return torch.randn(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, generator=generator, device="cpu") unique_inds, inverse = np.unique(noise_inds, return_inverse=True) noises = [] for i in range(unique_inds[-1]+1): noise = torch.randn([1] + list(latent_image.size())[1:], dtype=latent_image.dtype, layout=latent_image.layout, generator=generator, device="cpu") if i in unique_inds: noises.append(noise) noises = [noises[i] for i in inverse] noises = torch.cat(noises, axis=0) return noises def fix_empty_latent_channels(model, latent_image): latent_channels = model.get_model_object("latent_format").latent_channels if latent_channels != latent_image.shape[1] and torch.count_nonzero(latent_image) == 0: latent_image = comfy.utils.repeat_to_batch_size(latent_image, latent_channels, dim=1) return latent_image def prepare_sampling(model, noise_shape, positive, negative, noise_mask): logging.warning("Warning: comfy.sample.prepare_sampling isn't used anymore and can be removed") return model, positive, negative, noise_mask, [] def cleanup_additional_models(models): logging.warning("Warning: comfy.sample.cleanup_additional_models isn't used anymore and can be removed") def sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=1.0, disable_noise=False, start_step=None, last_step=None, force_full_denoise=False, noise_mask=None, sigmas=None, callback=None, disable_pbar=False, seed=None): sampler = comfy.samplers.KSampler(model, steps=steps, device=model.load_device, sampler=sampler_name, scheduler=scheduler, denoise=denoise, model_options=model.model_options) samples = sampler.sample(noise, positive, negative, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=sigmas, callback=callback, disable_pbar=disable_pbar, seed=seed) samples = samples.to(comfy.model_management.intermediate_device()) return samples def sample_custom(model, noise, cfg, sampler, sigmas, positive, negative, latent_image, noise_mask=None, callback=None, disable_pbar=False, seed=None): samples = comfy.samplers.sample(model, noise, positive, negative, cfg, model.load_device, sampler, sigmas, model_options=model.model_options, latent_image=latent_image, denoise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed) samples = samples.to(comfy.model_management.intermediate_device()) return samples
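A small sketch of the seeding behaviour that prepare_noise relies on: noise is always generated on the CPU from a freshly seeded default generator, so the same seed reproduces the same latent noise regardless of device state. The helper name below is illustrative:

import torch

latent = torch.zeros(1, 4, 64, 64)

def make_noise(seed):
    generator = torch.manual_seed(seed)  # returns the (re)seeded default generator
    return torch.randn(latent.size(), dtype=latent.dtype, layout=latent.layout,
                       generator=generator, device="cpu")

print(torch.equal(make_noise(42), make_noise(42)))  # True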
from .k_diffusion import sampling as k_diffusion_sampling from .extra_samplers import uni_pc import torch import collections from comfy import model_management import math import logging import comfy.sampler_helpers def get_area_and_mult(conds, x_in, timestep_in): dims = tuple(x_in.shape[2:]) area = None strength = 1.0 if 'timestep_start' in conds: timestep_start = conds['timestep_start'] if timestep_in[0] > timestep_start: return None if 'timestep_end' in conds: timestep_end = conds['timestep_end'] if timestep_in[0] < timestep_end: return None if 'area' in conds: area = list(conds['area']) if 'strength' in conds: strength = conds['strength'] input_x = x_in if area is not None: for i in range(len(dims)): area[i] = min(input_x.shape[i + 2] - area[len(dims) + i], area[i]) input_x = input_x.narrow(i + 2, area[len(dims) + i], area[i]) if 'mask' in conds: mask_strength = 1.0 if "mask_strength" in conds: mask_strength = conds["mask_strength"] mask = conds['mask'] assert(mask.shape[1:] == x_in.shape[2:]) mask = mask[:input_x.shape[0]] if area is not None: for i in range(len(dims)): mask = mask.narrow(i + 1, area[len(dims) + i], area[i]) mask = mask * mask_strength mask = mask.unsqueeze(1).repeat(input_x.shape[0] else: mask = torch.ones_like(input_x) mult = mask * strength if 'mask' not in conds and area is not None: rr = 8 for i in range(len(dims)): if area[len(dims) + i] != 0: for t in range(rr): m = mult.narrow(i + 2, t, 1) m *= ((1.0/rr) * (t + 1)) if (area[i] + area[len(dims) + i]) < x_in.shape[i + 2]: for t in range(rr): m = mult.narrow(i + 2, area[i] - 1 - t, 1) m *= ((1.0/rr) * (t + 1)) conditioning = {} model_conds = conds["model_conds"] for c in model_conds: conditioning[c] = model_conds[c].process_cond(batch_size=x_in.shape[0], device=x_in.device, area=area) control = conds.get('control', None) patches = None if 'gligen' in conds: gligen = conds['gligen'] patches = {} gligen_type = gligen[0] gligen_model = gligen[1] if gligen_type == "position": gligen_patch = gligen_model.model.set_position(input_x.shape, gligen[2], input_x.device) else: gligen_patch = gligen_model.model.set_empty(input_x.shape, input_x.device) patches['middle_patch'] = [gligen_patch] cond_obj = collections.namedtuple('cond_obj', ['input_x', 'mult', 'conditioning', 'area', 'control', 'patches']) return cond_obj(input_x, mult, conditioning, area, control, patches) def cond_equal_size(c1, c2): if c1 is c2: return True if c1.keys() != c2.keys(): return False for k in c1: if not c1[k].can_concat(c2[k]): return False return True def can_concat_cond(c1, c2): if c1.input_x.shape != c2.input_x.shape: return False def objects_concatable(obj1, obj2): if (obj1 is None) != (obj2 is None): return False if obj1 is not None: if obj1 is not obj2: return False return True if not objects_concatable(c1.control, c2.control): return False if not objects_concatable(c1.patches, c2.patches): return False return cond_equal_size(c1.conditioning, c2.conditioning) def cond_cat(c_list): c_crossattn = [] c_concat = [] c_adm = [] crossattn_max_len = 0 temp = {} for x in c_list: for k in x: cur = temp.get(k, []) cur.append(x[k]) temp[k] = cur out = {} for k in temp: conds = temp[k] out[k] = conds[0].concat(conds[1:]) return out def calc_cond_batch(model, conds, x_in, timestep, model_options): out_conds = [] out_counts = [] to_run = [] for i in range(len(conds)): out_conds.append(torch.zeros_like(x_in)) out_counts.append(torch.ones_like(x_in) * 1e-37) cond = conds[i] if cond is not None: for x in cond: p = get_area_and_mult(x, x_in, timestep) if p is 
None: continue to_run += [(p, i)] while len(to_run) > 0: first = to_run[0] first_shape = first[0][0].shape to_batch_temp = [] for x in range(len(to_run)): if can_concat_cond(to_run[x][0], first[0]): to_batch_temp += [x] to_batch_temp.reverse() to_batch = to_batch_temp[:1] free_memory = model_management.get_free_memory(x_in.device) for i in range(1, len(to_batch_temp) + 1): batch_amount = to_batch_temp[:len(to_batch_temp) input_shape = [len(batch_amount) * first_shape[0]] + list(first_shape)[1:] if model.memory_required(input_shape) < free_memory: to_batch = batch_amount break input_x = [] mult = [] c = [] cond_or_uncond = [] area = [] control = None patches = None for x in to_batch: o = to_run.pop(x) p = o[0] input_x.append(p.input_x) mult.append(p.mult) c.append(p.conditioning) area.append(p.area) cond_or_uncond.append(o[1]) control = p.control patches = p.patches batch_chunks = len(cond_or_uncond) input_x = torch.cat(input_x) c = cond_cat(c) timestep_ = torch.cat([timestep] * batch_chunks) if control is not None: c['control'] = control.get_control(input_x, timestep_, c, len(cond_or_uncond)) transformer_options = {} if 'transformer_options' in model_options: transformer_options = model_options['transformer_options'].copy() if patches is not None: if "patches" in transformer_options: cur_patches = transformer_options["patches"].copy() for p in patches: if p in cur_patches: cur_patches[p] = cur_patches[p] + patches[p] else: cur_patches[p] = patches[p] transformer_options["patches"] = cur_patches else: transformer_options["patches"] = patches transformer_options["cond_or_uncond"] = cond_or_uncond[:] transformer_options["sigmas"] = timestep c['transformer_options'] = transformer_options if 'model_function_wrapper' in model_options: output = model_options['model_function_wrapper'](model.apply_model, {"input": input_x, "timestep": timestep_, "c": c, "cond_or_uncond": cond_or_uncond}).chunk(batch_chunks) else: output = model.apply_model(input_x, timestep_, **c).chunk(batch_chunks) for o in range(batch_chunks): cond_index = cond_or_uncond[o] a = area[o] if a is None: out_conds[cond_index] += output[o] * mult[o] out_counts[cond_index] += mult[o] else: out_c = out_conds[cond_index] out_cts = out_counts[cond_index] dims = len(a) for i in range(dims): out_c = out_c.narrow(i + 2, a[i + dims], a[i]) out_cts = out_cts.narrow(i + 2, a[i + dims], a[i]) out_c += output[o] * mult[o] out_cts += mult[o] for i in range(len(out_conds)): out_conds[i] /= out_counts[i] return out_conds def calc_cond_uncond_batch(model, cond, uncond, x_in, timestep, model_options): logging.warning("WARNING: The comfy.samplers.calc_cond_uncond_batch function is deprecated please use the calc_cond_batch one instead.") return tuple(calc_cond_batch(model, [cond, uncond], x_in, timestep, model_options)) def cfg_function(model, cond_pred, uncond_pred, cond_scale, x, timestep, model_options={}, cond=None, uncond=None): if "sampler_cfg_function" in model_options: args = {"cond": x - cond_pred, "uncond": x - uncond_pred, "cond_scale": cond_scale, "timestep": timestep, "input": x, "sigma": timestep, "cond_denoised": cond_pred, "uncond_denoised": uncond_pred, "model": model, "model_options": model_options} cfg_result = x - model_options["sampler_cfg_function"](args) else: cfg_result = uncond_pred + (cond_pred - uncond_pred) * cond_scale for fn in model_options.get("sampler_post_cfg_function", []): args = {"denoised": cfg_result, "cond": cond, "uncond": uncond, "model": model, "uncond_denoised": uncond_pred, "cond_denoised": cond_pred, 
"sigma": timestep, "model_options": model_options, "input": x} cfg_result = fn(args) return cfg_result def sampling_function(model, x, timestep, uncond, cond, cond_scale, model_options={}, seed=None): if math.isclose(cond_scale, 1.0) and model_options.get("disable_cfg1_optimization", False) == False: uncond_ = None else: uncond_ = uncond conds = [cond, uncond_] out = calc_cond_batch(model, conds, x, timestep, model_options) return cfg_function(model, out[0], out[1], cond_scale, x, timestep, model_options=model_options, cond=cond, uncond=uncond_) class KSamplerX0Inpaint: def __init__(self, model, sigmas): self.inner_model = model self.sigmas = sigmas def __call__(self, x, sigma, denoise_mask, model_options={}, seed=None): if denoise_mask is not None: if "denoise_mask_function" in model_options: denoise_mask = model_options["denoise_mask_function"](sigma, denoise_mask, extra_options={"model": self.inner_model, "sigmas": self.sigmas}) latent_mask = 1. - denoise_mask x = x * denoise_mask + self.inner_model.inner_model.model_sampling.noise_scaling(sigma.reshape([sigma.shape[0]] + [1] * (len(self.noise.shape) - 1)), self.noise, self.latent_image) * latent_mask out = self.inner_model(x, sigma, model_options=model_options, seed=seed) if denoise_mask is not None: out = out * denoise_mask + self.latent_image * latent_mask return out def simple_scheduler(model_sampling, steps): s = model_sampling sigs = [] ss = len(s.sigmas) / steps for x in range(steps): sigs += [float(s.sigmas[-(1 + int(x * ss))])] sigs += [0.0] return torch.FloatTensor(sigs) def ddim_scheduler(model_sampling, steps): s = model_sampling sigs = [] ss = max(len(s.sigmas) x = 1 while x < len(s.sigmas): sigs += [float(s.sigmas[x])] x += ss sigs = sigs[::-1] sigs += [0.0] return torch.FloatTensor(sigs) def normal_scheduler(model_sampling, steps, sgm=False, floor=False): s = model_sampling start = s.timestep(s.sigma_max) end = s.timestep(s.sigma_min) if sgm: timesteps = torch.linspace(start, end, steps + 1)[:-1] else: timesteps = torch.linspace(start, end, steps) sigs = [] for x in range(len(timesteps)): ts = timesteps[x] sigs.append(s.sigma(ts)) sigs += [0.0] return torch.FloatTensor(sigs) def get_mask_aabb(masks): if masks.numel() == 0: return torch.zeros((0, 4), device=masks.device, dtype=torch.int) b = masks.shape[0] bounding_boxes = torch.zeros((b, 4), device=masks.device, dtype=torch.int) is_empty = torch.zeros((b), device=masks.device, dtype=torch.bool) for i in range(b): mask = masks[i] if mask.numel() == 0: continue if torch.max(mask != 0) == False: is_empty[i] = True continue y, x = torch.where(mask) bounding_boxes[i, 0] = torch.min(x) bounding_boxes[i, 1] = torch.min(y) bounding_boxes[i, 2] = torch.max(x) bounding_boxes[i, 3] = torch.max(y) return bounding_boxes, is_empty def resolve_areas_and_cond_masks_multidim(conditions, dims, device): for i in range(len(conditions)): c = conditions[i] if 'area' in c: area = c['area'] if area[0] == "percentage": modified = c.copy() a = area[1:] a_len = len(a) area = () for d in range(len(dims)): area += (max(1, round(a[d] * dims[d])),) for d in range(len(dims)): area += (round(a[d + a_len] * dims[d]),) modified['area'] = area c = modified conditions[i] = c if 'mask' in c: mask = c['mask'] mask = mask.to(device=device) modified = c.copy() if len(mask.shape) == len(dims): mask = mask.unsqueeze(0) if mask.shape[1:] != dims: mask = torch.nn.functional.interpolate(mask.unsqueeze(1), size=dims, mode='bilinear', align_corners=False).squeeze(1) if modified.get("set_area_to_bounds", False): bounds 
= torch.max(torch.abs(mask),dim=0).values.unsqueeze(0) boxes, is_empty = get_mask_aabb(bounds) if is_empty[0]: modified['area'] = (8, 8, 0, 0) else: box = boxes[0] H, W, Y, X = (box[3] - box[1] + 1, box[2] - box[0] + 1, box[1], box[0]) H = max(8, H) W = max(8, W) area = (int(H), int(W), int(Y), int(X)) modified['area'] = area modified['mask'] = mask conditions[i] = modified def resolve_areas_and_cond_masks(conditions, h, w, device): logging.warning("WARNING: The comfy.samplers.resolve_areas_and_cond_masks function is deprecated please use the resolve_areas_and_cond_masks_multidim one instead.") return resolve_areas_and_cond_masks_multidim(conditions, [h, w], device) def create_cond_with_same_area_if_none(conds, c): if 'area' not in c: return c_area = c['area'] smallest = None for x in conds: if 'area' in x: a = x['area'] if c_area[2] >= a[2] and c_area[3] >= a[3]: if a[0] + a[2] >= c_area[0] + c_area[2]: if a[1] + a[3] >= c_area[1] + c_area[3]: if smallest is None: smallest = x elif 'area' not in smallest: smallest = x else: if smallest['area'][0] * smallest['area'][1] > a[0] * a[1]: smallest = x else: if smallest is None: smallest = x if smallest is None: return if 'area' in smallest: if smallest['area'] == c_area: return out = c.copy() out['model_conds'] = smallest['model_conds'].copy() conds += [out] def calculate_start_end_timesteps(model, conds): s = model.model_sampling for t in range(len(conds)): x = conds[t] timestep_start = None timestep_end = None if 'start_percent' in x: timestep_start = s.percent_to_sigma(x['start_percent']) if 'end_percent' in x: timestep_end = s.percent_to_sigma(x['end_percent']) if (timestep_start is not None) or (timestep_end is not None): n = x.copy() if (timestep_start is not None): n['timestep_start'] = timestep_start if (timestep_end is not None): n['timestep_end'] = timestep_end conds[t] = n def pre_run_control(model, conds): s = model.model_sampling for t in range(len(conds)): x = conds[t] timestep_start = None timestep_end = None percent_to_timestep_function = lambda a: s.percent_to_sigma(a) if 'control' in x: x['control'].pre_run(model, percent_to_timestep_function) def apply_empty_x_to_equal_area(conds, uncond, name, uncond_fill_func): cond_cnets = [] cond_other = [] uncond_cnets = [] uncond_other = [] for t in range(len(conds)): x = conds[t] if 'area' not in x: if name in x and x[name] is not None: cond_cnets.append(x[name]) else: cond_other.append((x, t)) for t in range(len(uncond)): x = uncond[t] if 'area' not in x: if name in x and x[name] is not None: uncond_cnets.append(x[name]) else: uncond_other.append((x, t)) if len(uncond_cnets) > 0: return for x in range(len(cond_cnets)): temp = uncond_other[x % len(uncond_other)] o = temp[0] if name in o and o[name] is not None: n = o.copy() n[name] = uncond_fill_func(cond_cnets, x) uncond += [n] else: n = o.copy() n[name] = uncond_fill_func(cond_cnets, x) uncond[temp[1]] = n def encode_model_conds(model_function, conds, noise, device, prompt_type, **kwargs): for t in range(len(conds)): x = conds[t] params = x.copy() params["device"] = device params["noise"] = noise default_width = None if len(noise.shape) >= 4: default_width = noise.shape[3] * 8 params["width"] = params.get("width", default_width) params["height"] = params.get("height", noise.shape[2] * 8) params["prompt_type"] = params.get("prompt_type", prompt_type) for k in kwargs: if k not in params: params[k] = kwargs[k] out = model_function(**params) x = x.copy() model_conds = x['model_conds'].copy() for k in out: model_conds[k] = out[k] 
x['model_conds'] = model_conds conds[t] = x return conds class Sampler: def sample(self): pass def max_denoise(self, model_wrap, sigmas): max_sigma = float(model_wrap.inner_model.model_sampling.sigma_max) sigma = float(sigmas[0]) return math.isclose(max_sigma, sigma, rel_tol=1e-05) or sigma > max_sigma KSAMPLER_NAMES = ["euler", "euler_ancestral", "heun", "heunpp2","dpm_2", "dpm_2_ancestral", "lms", "dpm_fast", "dpm_adaptive", "dpmpp_2s_ancestral", "dpmpp_sde", "dpmpp_sde_gpu", "dpmpp_2m", "dpmpp_2m_sde", "dpmpp_2m_sde_gpu", "dpmpp_3m_sde", "dpmpp_3m_sde_gpu", "ddpm", "lcm", "ipndm", "ipndm_v"] class KSAMPLER(Sampler): def __init__(self, sampler_function, extra_options={}, inpaint_options={}): self.sampler_function = sampler_function self.extra_options = extra_options self.inpaint_options = inpaint_options def sample(self, model_wrap, sigmas, extra_args, callback, noise, latent_image=None, denoise_mask=None, disable_pbar=False): extra_args["denoise_mask"] = denoise_mask model_k = KSamplerX0Inpaint(model_wrap, sigmas) model_k.latent_image = latent_image if self.inpaint_options.get("random", False): generator = torch.manual_seed(extra_args.get("seed", 41) + 1) model_k.noise = torch.randn(noise.shape, generator=generator, device="cpu").to(noise.dtype).to(noise.device) else: model_k.noise = noise noise = model_wrap.inner_model.model_sampling.noise_scaling(sigmas[0], noise, latent_image, self.max_denoise(model_wrap, sigmas)) k_callback = None total_steps = len(sigmas) - 1 if callback is not None: k_callback = lambda x: callback(x["i"], x["denoised"], x["x"], total_steps) samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, **self.extra_options) samples = model_wrap.inner_model.model_sampling.inverse_noise_scaling(sigmas[-1], samples) return samples def ksampler(sampler_name, extra_options={}, inpaint_options={}): if sampler_name == "dpm_fast": def dpm_fast_function(model, noise, sigmas, extra_args, callback, disable): if len(sigmas) <= 1: return noise sigma_min = sigmas[-1] if sigma_min == 0: sigma_min = sigmas[-2] total_steps = len(sigmas) - 1 return k_diffusion_sampling.sample_dpm_fast(model, noise, sigma_min, sigmas[0], total_steps, extra_args=extra_args, callback=callback, disable=disable) sampler_function = dpm_fast_function elif sampler_name == "dpm_adaptive": def dpm_adaptive_function(model, noise, sigmas, extra_args, callback, disable, **extra_options): if len(sigmas) <= 1: return noise sigma_min = sigmas[-1] if sigma_min == 0: sigma_min = sigmas[-2] return k_diffusion_sampling.sample_dpm_adaptive(model, noise, sigma_min, sigmas[0], extra_args=extra_args, callback=callback, disable=disable, **extra_options) sampler_function = dpm_adaptive_function else: sampler_function = getattr(k_diffusion_sampling, "sample_{}".format(sampler_name)) return KSAMPLER(sampler_function, extra_options, inpaint_options) def process_conds(model, noise, conds, device, latent_image=None, denoise_mask=None, seed=None): for k in conds: conds[k] = conds[k][:] resolve_areas_and_cond_masks_multidim(conds[k], noise.shape[2:], device) for k in conds: calculate_start_end_timesteps(model, conds[k]) if hasattr(model, 'extra_conds'): for k in conds: conds[k] = encode_model_conds(model.extra_conds, conds[k], noise, device, k, latent_image=latent_image, denoise_mask=denoise_mask, seed=seed) for k in conds: for c in conds[k]: for kk in conds: if k != kk: create_cond_with_same_area_if_none(conds[kk], c) for k in conds: pre_run_control(model, conds[k]) if 
"positive" in conds: positive = conds["positive"] for k in conds: if k != "positive": apply_empty_x_to_equal_area(list(filter(lambda c: c.get('control_apply_to_uncond', False) == True, positive)), conds[k], 'control', lambda cond_cnets, x: cond_cnets[x]) apply_empty_x_to_equal_area(positive, conds[k], 'gligen', lambda cond_cnets, x: cond_cnets[x]) return conds class CFGGuider: def __init__(self, model_patcher): self.model_patcher = model_patcher self.model_options = model_patcher.model_options self.original_conds = {} self.cfg = 1.0 def set_conds(self, positive, negative): self.inner_set_conds({"positive": positive, "negative": negative}) def set_cfg(self, cfg): self.cfg = cfg def inner_set_conds(self, conds): for k in conds: self.original_conds[k] = comfy.sampler_helpers.convert_cond(conds[k]) def __call__(self, *args, **kwargs): return self.predict_noise(*args, **kwargs) def predict_noise(self, x, timestep, model_options={}, seed=None): return sampling_function(self.inner_model, x, timestep, self.conds.get("negative", None), self.conds.get("positive", None), self.cfg, model_options=model_options, seed=seed) def inner_sample(self, noise, latent_image, device, sampler, sigmas, denoise_mask, callback, disable_pbar, seed): if latent_image is not None and torch.count_nonzero(latent_image) > 0: latent_image = self.inner_model.process_latent_in(latent_image) self.conds = process_conds(self.inner_model, noise, self.conds, device, latent_image, denoise_mask, seed) extra_args = {"model_options": self.model_options, "seed":seed} samples = sampler.sample(self, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar) return self.inner_model.process_latent_out(samples.to(torch.float32)) def sample(self, noise, latent_image, sampler, sigmas, denoise_mask=None, callback=None, disable_pbar=False, seed=None): if sigmas.shape[-1] == 0: return latent_image self.conds = {} for k in self.original_conds: self.conds[k] = list(map(lambda a: a.copy(), self.original_conds[k])) self.inner_model, self.conds, self.loaded_models = comfy.sampler_helpers.prepare_sampling(self.model_patcher, noise.shape, self.conds) device = self.model_patcher.load_device if denoise_mask is not None: denoise_mask = comfy.sampler_helpers.prepare_mask(denoise_mask, noise.shape, device) noise = noise.to(device) latent_image = latent_image.to(device) sigmas = sigmas.to(device) output = self.inner_sample(noise, latent_image, device, sampler, sigmas, denoise_mask, callback, disable_pbar, seed) comfy.sampler_helpers.cleanup_models(self.conds, self.loaded_models) del self.inner_model del self.conds del self.loaded_models return output def sample(model, noise, positive, negative, cfg, device, sampler, sigmas, model_options={}, latent_image=None, denoise_mask=None, callback=None, disable_pbar=False, seed=None): cfg_guider = CFGGuider(model) cfg_guider.set_conds(positive, negative) cfg_guider.set_cfg(cfg) return cfg_guider.sample(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed) SCHEDULER_NAMES = ["normal", "karras", "exponential", "sgm_uniform", "simple", "ddim_uniform"] SAMPLER_NAMES = KSAMPLER_NAMES + ["ddim", "uni_pc", "uni_pc_bh2"] def calculate_sigmas(model_sampling, scheduler_name, steps): if scheduler_name == "karras": sigmas = k_diffusion_sampling.get_sigmas_karras(n=steps, sigma_min=float(model_sampling.sigma_min), sigma_max=float(model_sampling.sigma_max)) elif scheduler_name == "exponential": sigmas = k_diffusion_sampling.get_sigmas_exponential(n=steps, 
sigma_min=float(model_sampling.sigma_min), sigma_max=float(model_sampling.sigma_max)) elif scheduler_name == "normal": sigmas = normal_scheduler(model_sampling, steps) elif scheduler_name == "simple": sigmas = simple_scheduler(model_sampling, steps) elif scheduler_name == "ddim_uniform": sigmas = ddim_scheduler(model_sampling, steps) elif scheduler_name == "sgm_uniform": sigmas = normal_scheduler(model_sampling, steps, sgm=True) else: logging.error("error invalid scheduler {}".format(scheduler_name)) return sigmas def sampler_object(name): if name == "uni_pc": sampler = KSAMPLER(uni_pc.sample_unipc) elif name == "uni_pc_bh2": sampler = KSAMPLER(uni_pc.sample_unipc_bh2) elif name == "ddim": sampler = ksampler("euler", inpaint_options={"random": True}) else: sampler = ksampler(name) return sampler class KSampler: SCHEDULERS = SCHEDULER_NAMES SAMPLERS = SAMPLER_NAMES DISCARD_PENULTIMATE_SIGMA_SAMPLERS = set(('dpm_2', 'dpm_2_ancestral', 'uni_pc', 'uni_pc_bh2')) def __init__(self, model, steps, device, sampler=None, scheduler=None, denoise=None, model_options={}): self.model = model self.device = device if scheduler not in self.SCHEDULERS: scheduler = self.SCHEDULERS[0] if sampler not in self.SAMPLERS: sampler = self.SAMPLERS[0] self.scheduler = scheduler self.sampler = sampler self.set_steps(steps, denoise) self.denoise = denoise self.model_options = model_options def calculate_sigmas(self, steps): sigmas = None discard_penultimate_sigma = False if self.sampler in self.DISCARD_PENULTIMATE_SIGMA_SAMPLERS: steps += 1 discard_penultimate_sigma = True sigmas = calculate_sigmas(self.model.get_model_object("model_sampling"), self.scheduler, steps) if discard_penultimate_sigma: sigmas = torch.cat([sigmas[:-2], sigmas[-1:]]) return sigmas def set_steps(self, steps, denoise=None): self.steps = steps if denoise is None or denoise > 0.9999: self.sigmas = self.calculate_sigmas(steps).to(self.device) else: if denoise <= 0.0: self.sigmas = torch.FloatTensor([]) else: new_steps = int(steps/denoise) sigmas = self.calculate_sigmas(new_steps).to(self.device) self.sigmas = sigmas[-(steps + 1):] def sample(self, noise, positive, negative, cfg, latent_image=None, start_step=None, last_step=None, force_full_denoise=False, denoise_mask=None, sigmas=None, callback=None, disable_pbar=False, seed=None): if sigmas is None: sigmas = self.sigmas if last_step is not None and last_step < (len(sigmas) - 1): sigmas = sigmas[:last_step + 1] if force_full_denoise: sigmas[-1] = 0 if start_step is not None: if start_step < (len(sigmas) - 1): sigmas = sigmas[start_step:] else: if latent_image is not None: return latent_image else: return torch.zeros_like(noise) sampler = sampler_object(self.sampler) return sample(self.model, noise, positive, negative, cfg, self.device, sampler, sigmas, self.model_options, latent_image=latent_image, denoise_mask=denoise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed)
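A standalone sketch of the sigma selection performed by simple_scheduler above, using a linearly spaced stand-in for the model's discrete sigma table; in practice the table comes from model_sampling.sigmas:

import torch

def simple_schedule(model_sigmas, steps):
    # Stride backwards through the discrete sigma table, then terminate the schedule at 0.0.
    sigs = []
    ss = len(model_sigmas) / steps
    for x in range(steps):
        sigs.append(float(model_sigmas[-(1 + int(x * ss))]))
    sigs.append(0.0)
    return torch.FloatTensor(sigs)

sigmas = torch.linspace(0.03, 14.6, 1000)  # stand-in for ModelSamplingDiscrete.sigmas
print(simple_schedule(sigmas, 4))          # four descending sigmas followed by 0.0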
import torch import comfy.model_management import comfy.conds def prepare_mask(noise_mask, shape, device): """ensures noise mask is of proper dimensions""" noise_mask = torch.nn.functional.interpolate(noise_mask.reshape((-1, 1, noise_mask.shape[-2], noise_mask.shape[-1])), size=(shape[2], shape[3]), mode="bilinear") noise_mask = torch.cat([noise_mask] * shape[1], dim=1) noise_mask = comfy.utils.repeat_to_batch_size(noise_mask, shape[0]) noise_mask = noise_mask.to(device) return noise_mask def get_models_from_cond(cond, model_type): models = [] for c in cond: if model_type in c: models += [c[model_type]] return models def convert_cond(cond): out = [] for c in cond: temp = c[1].copy() model_conds = temp.get("model_conds", {}) if c[0] is not None: model_conds["c_crossattn"] = comfy.conds.CONDCrossAttn(c[0]) temp["cross_attn"] = c[0] temp["model_conds"] = model_conds out.append(temp) return out def get_additional_models(conds, dtype): """loads additional models in conditioning""" cnets = [] gligen = [] for k in conds: cnets += get_models_from_cond(conds[k], "control") gligen += get_models_from_cond(conds[k], "gligen") control_nets = set(cnets) inference_memory = 0 control_models = [] for m in control_nets: control_models += m.get_models() inference_memory += m.inference_memory_requirements(dtype) gligen = [x[1] for x in gligen] models = control_models + gligen return models, inference_memory def cleanup_additional_models(models): """cleanup additional models that were loaded""" for m in models: if hasattr(m, 'cleanup'): m.cleanup() def prepare_sampling(model, noise_shape, conds): device = model.load_device real_model = None models, inference_memory = get_additional_models(conds, model.model_dtype()) comfy.model_management.load_models_gpu([model] + models, model.memory_required([noise_shape[0] * 2] + list(noise_shape[1:])) + inference_memory) real_model = model.model return real_model, conds, models def cleanup_models(conds, models): cleanup_additional_models(models) control_cleanup = [] for k in conds: control_cleanup += get_models_from_cond(conds[k], "control") cleanup_additional_models(set(control_cleanup))
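A quick sketch of the shape handling in prepare_mask: the pixel-space mask is resized to the latent's spatial resolution, then broadcast across the latent channels and batch. The final repeat approximates comfy.utils.repeat_to_batch_size for this simple case:

import torch

mask = torch.rand(1, 512, 512)   # single-channel pixel-space mask
shape = (2, 4, 64, 64)           # latent (batch, channels, height, width)

mask = torch.nn.functional.interpolate(
    mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])),
    size=(shape[2], shape[3]), mode="bilinear")
mask = torch.cat([mask] * shape[1], dim=1)        # broadcast over latent channels
mask = mask.repeat(shape[0], 1, 1, 1)[:shape[0]]  # approximate repeat_to_batch_size
print(mask.shape)  # torch.Size([2, 4, 64, 64])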
from comfy import sd1_clip from transformers import T5TokenizerFast import comfy.t5 import os class T5BaseModel(sd1_clip.SDClipModel): def __init__(self, device="cpu", layer="last", layer_idx=None, dtype=None): textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_config_base.json") super().__init__(device=device, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, special_tokens={"end": 1, "pad": 0}, model_class=comfy.t5.T5, enable_attention_masks=True, zero_out_masked=True) class T5BaseTokenizer(sd1_clip.SDTokenizer): def __init__(self, embedding_directory=None): tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_tokenizer") super().__init__(tokenizer_path, pad_with_end=False, embedding_size=768, embedding_key='t5base', tokenizer_class=T5TokenizerFast, has_start_token=False, pad_to_max_length=False, max_length=99999999, min_length=128) class SAT5Tokenizer(sd1_clip.SD1Tokenizer): def __init__(self, embedding_directory=None): super().__init__(embedding_directory=embedding_directory, clip_name="t5base", tokenizer=T5BaseTokenizer) class SAT5Model(sd1_clip.SD1ClipModel): def __init__(self, device="cpu", dtype=None, **kwargs): super().__init__(device=device, dtype=dtype, clip_name="t5base", clip_model=T5BaseModel, **kwargs)
import torch from enum import Enum import logging from comfy import model_management from .ldm.models.autoencoder import AutoencoderKL, AutoencodingEngine from .ldm.cascade.stage_a import StageA from .ldm.cascade.stage_c_coder import StageC_coder from .ldm.audio.autoencoder import AudioOobleckVAE import yaml import comfy.utils from . import clip_vision from . import gligen from . import diffusers_convert from . import model_detection from . import sd1_clip from . import sd2_clip from . import sdxl_clip from . import sd3_clip from . import sa_t5 import comfy.model_patcher import comfy.lora import comfy.t2i_adapter.adapter import comfy.supported_models_base import comfy.taesd.taesd def load_model_weights(model, sd): m, u = model.load_state_dict(sd, strict=False) m = set(m) unexpected_keys = set(u) k = list(sd.keys()) for x in k: if x not in unexpected_keys: w = sd.pop(x) del w if len(m) > 0: logging.warning("missing {}".format(m)) return model def load_clip_weights(model, sd): k = list(sd.keys()) for x in k: if x.startswith("cond_stage_model.transformer.") and not x.startswith("cond_stage_model.transformer.text_model."): y = x.replace("cond_stage_model.transformer.", "cond_stage_model.transformer.text_model.") sd[y] = sd.pop(x) if 'cond_stage_model.transformer.text_model.embeddings.position_ids' in sd: ids = sd['cond_stage_model.transformer.text_model.embeddings.position_ids'] if ids.dtype == torch.float32: sd['cond_stage_model.transformer.text_model.embeddings.position_ids'] = ids.round() sd = comfy.utils.clip_text_transformers_convert(sd, "cond_stage_model.model.", "cond_stage_model.transformer.") return load_model_weights(model, sd) def load_lora_for_models(model, clip, lora, strength_model, strength_clip): key_map = {} if model is not None: key_map = comfy.lora.model_lora_keys_unet(model.model, key_map) if clip is not None: key_map = comfy.lora.model_lora_keys_clip(clip.cond_stage_model, key_map) loaded = comfy.lora.load_lora(lora, key_map) if model is not None: new_modelpatcher = model.clone() k = new_modelpatcher.add_patches(loaded, strength_model) else: k = () new_modelpatcher = None if clip is not None: new_clip = clip.clone() k1 = new_clip.add_patches(loaded, strength_clip) else: k1 = () new_clip = None k = set(k) k1 = set(k1) for x in loaded: if (x not in k) and (x not in k1): logging.warning("NOT LOADED {}".format(x)) return (new_modelpatcher, new_clip) class CLIP: def __init__(self, target=None, embedding_directory=None, no_init=False): if no_init: return params = target.params.copy() clip = target.clip tokenizer = target.tokenizer load_device = model_management.text_encoder_device() offload_device = model_management.text_encoder_offload_device() params['device'] = offload_device dtype = model_management.text_encoder_dtype(load_device) params['dtype'] = dtype self.cond_stage_model = clip(**(params)) for dt in self.cond_stage_model.dtypes: if not model_management.supports_cast(load_device, dt): load_device = offload_device self.tokenizer = tokenizer(embedding_directory=embedding_directory) self.patcher = comfy.model_patcher.ModelPatcher(self.cond_stage_model, load_device=load_device, offload_device=offload_device) self.layer_idx = None logging.debug("CLIP model load device: {}, offload device: {}".format(load_device, offload_device)) def clone(self): n = CLIP(no_init=True) n.patcher = self.patcher.clone() n.cond_stage_model = self.cond_stage_model n.tokenizer = self.tokenizer n.layer_idx = self.layer_idx return n def add_patches(self, patches, strength_patch=1.0, 
strength_model=1.0): return self.patcher.add_patches(patches, strength_patch, strength_model) def clip_layer(self, layer_idx): self.layer_idx = layer_idx def tokenize(self, text, return_word_ids=False): return self.tokenizer.tokenize_with_weights(text, return_word_ids) def encode_from_tokens(self, tokens, return_pooled=False): self.cond_stage_model.reset_clip_options() if self.layer_idx is not None: self.cond_stage_model.set_clip_options({"layer": self.layer_idx}) if return_pooled == "unprojected": self.cond_stage_model.set_clip_options({"projected_pooled": False}) self.load_model() cond, pooled = self.cond_stage_model.encode_token_weights(tokens) if return_pooled: return cond, pooled return cond def encode(self, text): tokens = self.tokenize(text) return self.encode_from_tokens(tokens) def load_sd(self, sd, full_model=False): if full_model: return self.cond_stage_model.load_state_dict(sd, strict=False) else: return self.cond_stage_model.load_sd(sd) def get_sd(self): return self.cond_stage_model.state_dict() def load_model(self): model_management.load_model_gpu(self.patcher) return self.patcher def get_key_patches(self): return self.patcher.get_key_patches() class VAE: def __init__(self, sd=None, device=None, config=None, dtype=None): if 'decoder.up_blocks.0.resnets.0.norm1.weight' in sd.keys(): sd = diffusers_convert.convert_vae_state_dict(sd) self.memory_used_encode = lambda shape, dtype: (1767 * shape[2] * shape[3]) * model_management.dtype_size(dtype) self.memory_used_decode = lambda shape, dtype: (2178 * shape[2] * shape[3] * 64) * model_management.dtype_size(dtype) self.downscale_ratio = 8 self.upscale_ratio = 8 self.latent_channels = 4 self.output_channels = 3 self.process_input = lambda image: image * 2.0 - 1.0 self.process_output = lambda image: torch.clamp((image + 1.0) / 2.0, min=0.0, max=1.0) self.working_dtypes = [torch.bfloat16, torch.float32] if config is None: if "decoder.mid.block_1.mix_factor" in sd: encoder_config = {'double_z': True, 'z_channels': 4, 'resolution': 256, 'in_channels': 3, 'out_ch': 3, 'ch': 128, 'ch_mult': [1, 2, 4, 4], 'num_res_blocks': 2, 'attn_resolutions': [], 'dropout': 0.0} decoder_config = encoder_config.copy() decoder_config["video_kernel_size"] = [3, 1, 1] decoder_config["alpha"] = 0.0 self.first_stage_model = AutoencodingEngine(regularizer_config={'target': "comfy.ldm.models.autoencoder.DiagonalGaussianRegularizer"}, encoder_config={'target': "comfy.ldm.modules.diffusionmodules.model.Encoder", 'params': encoder_config}, decoder_config={'target': "comfy.ldm.modules.temporal_ae.VideoDecoder", 'params': decoder_config}) elif "taesd_decoder.1.weight" in sd: self.latent_channels = sd["taesd_decoder.1.weight"].shape[1] self.first_stage_model = comfy.taesd.taesd.TAESD(latent_channels=self.latent_channels) elif "vquantizer.codebook.weight" in sd: self.first_stage_model = StageA() self.downscale_ratio = 4 self.upscale_ratio = 4 self.process_input = lambda image: image self.process_output = lambda image: image elif "backbone.1.0.block.0.1.num_batches_tracked" in sd: self.first_stage_model = StageC_coder() self.downscale_ratio = 32 self.latent_channels = 16 new_sd = {} for k in sd: new_sd["encoder.{}".format(k)] = sd[k] sd = new_sd elif "blocks.11.num_batches_tracked" in sd: self.first_stage_model = StageC_coder() self.latent_channels = 16 new_sd = {} for k in sd: new_sd["previewer.{}".format(k)] = sd[k] sd = new_sd elif "encoder.backbone.1.0.block.0.1.num_batches_tracked" in sd: self.first_stage_model = StageC_coder() self.downscale_ratio = 32 
self.latent_channels = 16 elif "decoder.conv_in.weight" in sd: ddconfig = {'double_z': True, 'z_channels': 4, 'resolution': 256, 'in_channels': 3, 'out_ch': 3, 'ch': 128, 'ch_mult': [1, 2, 4, 4], 'num_res_blocks': 2, 'attn_resolutions': [], 'dropout': 0.0} if 'encoder.down.2.downsample.conv.weight' not in sd and 'decoder.up.3.upsample.conv.weight' not in sd: ddconfig['ch_mult'] = [1, 2, 4] self.downscale_ratio = 4 self.upscale_ratio = 4 self.latent_channels = ddconfig['z_channels'] = sd["decoder.conv_in.weight"].shape[1] if 'quant_conv.weight' in sd: self.first_stage_model = AutoencoderKL(ddconfig=ddconfig, embed_dim=4) else: self.first_stage_model = AutoencodingEngine(regularizer_config={'target': "comfy.ldm.models.autoencoder.DiagonalGaussianRegularizer"}, encoder_config={'target': "comfy.ldm.modules.diffusionmodules.model.Encoder", 'params': ddconfig}, decoder_config={'target': "comfy.ldm.modules.diffusionmodules.model.Decoder", 'params': ddconfig}) elif "decoder.layers.0.weight_v" in sd: self.first_stage_model = AudioOobleckVAE() self.memory_used_encode = lambda shape, dtype: (1000 * shape[2]) * model_management.dtype_size(dtype) self.memory_used_decode = lambda shape, dtype: (1000 * shape[2] * 2048) * model_management.dtype_size(dtype) self.latent_channels = 64 self.output_channels = 2 self.upscale_ratio = 2048 self.downscale_ratio = 2048 self.process_output = lambda audio: audio self.process_input = lambda audio: audio self.working_dtypes = [torch.float16, torch.bfloat16, torch.float32] else: logging.warning("WARNING: No VAE weights detected, VAE not initalized.") self.first_stage_model = None return else: self.first_stage_model = AutoencoderKL(**(config['params'])) self.first_stage_model = self.first_stage_model.eval() m, u = self.first_stage_model.load_state_dict(sd, strict=False) if len(m) > 0: logging.warning("Missing VAE keys {}".format(m)) if len(u) > 0: logging.debug("Leftover VAE keys {}".format(u)) if device is None: device = model_management.vae_device() self.device = device offload_device = model_management.vae_offload_device() if dtype is None: dtype = model_management.vae_dtype(self.device, self.working_dtypes) self.vae_dtype = dtype self.first_stage_model.to(self.vae_dtype) self.output_device = model_management.intermediate_device() self.patcher = comfy.model_patcher.ModelPatcher(self.first_stage_model, load_device=self.device, offload_device=offload_device) logging.debug("VAE load device: {}, offload device: {}, dtype: {}".format(self.device, offload_device, self.vae_dtype)) def vae_encode_crop_pixels(self, pixels): dims = pixels.shape[1:-1] for d in range(len(dims)): x = (dims[d] x_offset = (dims[d] % self.downscale_ratio) if x != dims[d]: pixels = pixels.narrow(d + 1, x_offset, x) return pixels def decode_tiled_(self, samples, tile_x=64, tile_y=64, overlap = 16): steps = samples.shape[0] * comfy.utils.get_tiled_scale_steps(samples.shape[3], samples.shape[2], tile_x, tile_y, overlap) steps += samples.shape[0] * comfy.utils.get_tiled_scale_steps(samples.shape[3], samples.shape[2], tile_x steps += samples.shape[0] * comfy.utils.get_tiled_scale_steps(samples.shape[3], samples.shape[2], tile_x * 2, tile_y pbar = comfy.utils.ProgressBar(steps) decode_fn = lambda a: self.first_stage_model.decode(a.to(self.vae_dtype).to(self.device)).float() output = self.process_output( (comfy.utils.tiled_scale(samples, decode_fn, tile_x comfy.utils.tiled_scale(samples, decode_fn, tile_x * 2, tile_y comfy.utils.tiled_scale(samples, decode_fn, tile_x, tile_y, overlap, upscale_amount = 
self.upscale_ratio, output_device=self.output_device, pbar = pbar)) / 3.0) return output def decode_tiled_1d(self, samples, tile_x=128, overlap=32): decode_fn = lambda a: self.first_stage_model.decode(a.to(self.vae_dtype).to(self.device)).float() return comfy.utils.tiled_scale_multidim(samples, decode_fn, tile=(tile_x,), overlap=overlap, upscale_amount=self.upscale_ratio, out_channels=self.output_channels, output_device=self.output_device) def encode_tiled_(self, pixel_samples, tile_x=512, tile_y=512, overlap = 64): steps = pixel_samples.shape[0] * comfy.utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x, tile_y, overlap) steps += pixel_samples.shape[0] * comfy.utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x // 2, tile_y * 2, overlap) steps += pixel_samples.shape[0] * comfy.utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x * 2, tile_y // 2, overlap) pbar = comfy.utils.ProgressBar(steps) encode_fn = lambda a: self.first_stage_model.encode((self.process_input(a)).to(self.vae_dtype).to(self.device)).float() samples = comfy.utils.tiled_scale(pixel_samples, encode_fn, tile_x, tile_y, overlap, upscale_amount = (1/self.downscale_ratio), out_channels=self.latent_channels, output_device=self.output_device, pbar=pbar) samples += comfy.utils.tiled_scale(pixel_samples, encode_fn, tile_x * 2, tile_y // 2, overlap, upscale_amount = (1/self.downscale_ratio), out_channels=self.latent_channels, output_device=self.output_device, pbar=pbar) samples += comfy.utils.tiled_scale(pixel_samples, encode_fn, tile_x // 2, tile_y * 2, overlap, upscale_amount = (1/self.downscale_ratio), out_channels=self.latent_channels, output_device=self.output_device, pbar=pbar) samples /= 3.0 return samples def encode_tiled_1d(self, samples, tile_x=128 * 2048, overlap=32 * 2048): encode_fn = lambda a: self.first_stage_model.encode((self.process_input(a)).to(self.vae_dtype).to(self.device)).float() return comfy.utils.tiled_scale_multidim(samples, encode_fn, tile=(tile_x,), overlap=overlap, upscale_amount=(1/self.downscale_ratio), out_channels=self.latent_channels, output_device=self.output_device) def decode(self, samples_in): try: memory_used = self.memory_used_decode(samples_in.shape, self.vae_dtype) model_management.load_models_gpu([self.patcher], memory_required=memory_used) free_memory = model_management.get_free_memory(self.device) batch_number = int(free_memory / memory_used) batch_number = max(1, batch_number) pixel_samples = torch.empty((samples_in.shape[0], self.output_channels) + tuple(map(lambda a: a * self.upscale_ratio, samples_in.shape[2:])), device=self.output_device) for x in range(0, samples_in.shape[0], batch_number): samples = samples_in[x:x+batch_number].to(self.vae_dtype).to(self.device) pixel_samples[x:x+batch_number] = self.process_output(self.first_stage_model.decode(samples).to(self.output_device).float()) except model_management.OOM_EXCEPTION as e: logging.warning("Warning: Ran out of memory when regular VAE decoding, retrying with tiled VAE decoding.") if len(samples_in.shape) == 3: pixel_samples = self.decode_tiled_1d(samples_in) else: pixel_samples = self.decode_tiled_(samples_in) pixel_samples = pixel_samples.to(self.output_device).movedim(1,-1) return pixel_samples def decode_tiled(self, samples, tile_x=64, tile_y=64, overlap = 16): model_management.load_model_gpu(self.patcher) output = self.decode_tiled_(samples, tile_x, tile_y, overlap) return output.movedim(1,-1) def encode(self, pixel_samples): pixel_samples = self.vae_encode_crop_pixels(pixel_samples) pixel_samples = pixel_samples.movedim(-1,1) try: memory_used = self.memory_used_encode(pixel_samples.shape, self.vae_dtype) model_management.load_models_gpu([self.patcher], memory_required=memory_used) free_memory = model_management.get_free_memory(self.device) batch_number 
= int(free_memory / memory_used) batch_number = max(1, batch_number) samples = torch.empty((pixel_samples.shape[0], self.latent_channels) + tuple(map(lambda a: a // self.downscale_ratio, pixel_samples.shape[2:])), device=self.output_device) for x in range(0, pixel_samples.shape[0], batch_number): pixels_in = self.process_input(pixel_samples[x:x+batch_number]).to(self.vae_dtype).to(self.device) samples[x:x+batch_number] = self.first_stage_model.encode(pixels_in).to(self.output_device).float() except model_management.OOM_EXCEPTION as e: logging.warning("Warning: Ran out of memory when regular VAE encoding, retrying with tiled VAE encoding.") if len(pixel_samples.shape) == 3: samples = self.encode_tiled_1d(pixel_samples) else: samples = self.encode_tiled_(pixel_samples) return samples def encode_tiled(self, pixel_samples, tile_x=512, tile_y=512, overlap = 64): pixel_samples = self.vae_encode_crop_pixels(pixel_samples) model_management.load_model_gpu(self.patcher) pixel_samples = pixel_samples.movedim(-1,1) samples = self.encode_tiled_(pixel_samples, tile_x=tile_x, tile_y=tile_y, overlap=overlap) return samples def get_sd(self): return self.first_stage_model.state_dict() class StyleModel: def __init__(self, model, device="cpu"): self.model = model def get_cond(self, input): return self.model(input.last_hidden_state) def load_style_model(ckpt_path): model_data = comfy.utils.load_torch_file(ckpt_path, safe_load=True) keys = model_data.keys() if "style_embedding" in keys: model = comfy.t2i_adapter.adapter.StyleAdapter(width=1024, context_dim=768, num_head=8, n_layes=3, num_token=8) else: raise Exception("invalid style model {}".format(ckpt_path)) model.load_state_dict(model_data) return StyleModel(model) class CLIPType(Enum): STABLE_DIFFUSION = 1 STABLE_CASCADE = 2 SD3 = 3 STABLE_AUDIO = 4 def load_clip(ckpt_paths, embedding_directory=None, clip_type=CLIPType.STABLE_DIFFUSION): clip_data = [] for p in ckpt_paths: clip_data.append(comfy.utils.load_torch_file(p, safe_load=True)) class EmptyClass: pass for i in range(len(clip_data)): if "transformer.resblocks.0.ln_1.weight" in clip_data[i]: clip_data[i] = comfy.utils.clip_text_transformers_convert(clip_data[i], "", "") else: if "text_projection" in clip_data[i]: clip_data[i]["text_projection.weight"] = clip_data[i]["text_projection"].transpose(0, 1) clip_target = EmptyClass() clip_target.params = {} if len(clip_data) == 1: if "text_model.encoder.layers.30.mlp.fc1.weight" in clip_data[0]: if clip_type == CLIPType.STABLE_CASCADE: clip_target.clip = sdxl_clip.StableCascadeClipModel clip_target.tokenizer = sdxl_clip.StableCascadeTokenizer else: clip_target.clip = sdxl_clip.SDXLRefinerClipModel clip_target.tokenizer = sdxl_clip.SDXLTokenizer elif "text_model.encoder.layers.22.mlp.fc1.weight" in clip_data[0]: clip_target.clip = sd2_clip.SD2ClipModel clip_target.tokenizer = sd2_clip.SD2Tokenizer elif "encoder.block.23.layer.1.DenseReluDense.wi_1.weight" in clip_data[0]: dtype_t5 = clip_data[0]["encoder.block.23.layer.1.DenseReluDense.wi_1.weight"].dtype clip_target.clip = sd3_clip.sd3_clip(clip_l=False, clip_g=False, t5=True, dtype_t5=dtype_t5) clip_target.tokenizer = sd3_clip.SD3Tokenizer elif "encoder.block.0.layer.0.SelfAttention.k.weight" in clip_data[0]: clip_target.clip = sa_t5.SAT5Model clip_target.tokenizer = sa_t5.SAT5Tokenizer else: clip_target.clip = sd1_clip.SD1ClipModel clip_target.tokenizer = sd1_clip.SD1Tokenizer elif len(clip_data) == 2: if clip_type == CLIPType.SD3: clip_target.clip = sd3_clip.sd3_clip(clip_l=True, clip_g=True, t5=False) clip_target.tokenizer = sd3_clip.SD3Tokenizer else: clip_target.clip = 
sdxl_clip.SDXLClipModel clip_target.tokenizer = sdxl_clip.SDXLTokenizer elif len(clip_data) == 3: clip_target.clip = sd3_clip.SD3ClipModel clip_target.tokenizer = sd3_clip.SD3Tokenizer clip = CLIP(clip_target, embedding_directory=embedding_directory) for c in clip_data: m, u = clip.load_sd(c) if len(m) > 0: logging.warning("clip missing: {}".format(m)) if len(u) > 0: logging.debug("clip unexpected: {}".format(u)) return clip def load_gligen(ckpt_path): data = comfy.utils.load_torch_file(ckpt_path, safe_load=True) model = gligen.load_gligen(data) if model_management.should_use_fp16(): model = model.half() return comfy.model_patcher.ModelPatcher(model, load_device=model_management.get_torch_device(), offload_device=model_management.unet_offload_device()) def load_checkpoint(config_path=None, ckpt_path=None, output_vae=True, output_clip=True, embedding_directory=None, state_dict=None, config=None): logging.warning("Warning: The load checkpoint with config function is deprecated and will eventually be removed, please use the other one.") model, clip, vae, _ = load_checkpoint_guess_config(ckpt_path, output_vae=output_vae, output_clip=output_clip, output_clipvision=False, embedding_directory=embedding_directory, output_model=True) if config is None: with open(config_path, 'r') as stream: config = yaml.safe_load(stream) model_config_params = config['model']['params'] clip_config = model_config_params['cond_stage_config'] scale_factor = model_config_params['scale_factor'] if "parameterization" in model_config_params: if model_config_params["parameterization"] == "v": m = model.clone() class ModelSamplingAdvanced(comfy.model_sampling.ModelSamplingDiscrete, comfy.model_sampling.V_PREDICTION): pass m.add_object_patch("model_sampling", ModelSamplingAdvanced(model.model.model_config)) model = m layer_idx = clip_config.get("params", {}).get("layer_idx", None) if layer_idx is not None: clip.clip_layer(layer_idx) return (model, clip, vae) def load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, output_clipvision=False, embedding_directory=None, output_model=True): sd = comfy.utils.load_torch_file(ckpt_path) sd_keys = sd.keys() clip = None clipvision = None vae = None model = None model_patcher = None clip_target = None diffusion_model_prefix = model_detection.unet_prefix_from_state_dict(sd) parameters = comfy.utils.calculate_parameters(sd, diffusion_model_prefix) load_device = model_management.get_torch_device() model_config = model_detection.model_config_from_unet(sd, diffusion_model_prefix) unet_dtype = model_management.unet_dtype(model_params=parameters, supported_dtypes=model_config.supported_inference_dtypes) manual_cast_dtype = model_management.unet_manual_cast(unet_dtype, load_device, model_config.supported_inference_dtypes) model_config.set_inference_dtype(unet_dtype, manual_cast_dtype) if model_config is None: raise RuntimeError("ERROR: Could not detect model type of: {}".format(ckpt_path)) if model_config.clip_vision_prefix is not None: if output_clipvision: clipvision = clip_vision.load_clipvision_from_sd(sd, model_config.clip_vision_prefix, True) if output_model: inital_load_device = model_management.unet_inital_load_device(parameters, unet_dtype) offload_device = model_management.unet_offload_device() model = model_config.get_model(sd, diffusion_model_prefix, device=inital_load_device) model.load_model_weights(sd, diffusion_model_prefix) if output_vae: vae_sd = comfy.utils.state_dict_prefix_replace(sd, {k: "" for k in model_config.vae_key_prefix}, filter_keys=True) 
vae_sd = model_config.process_vae_state_dict(vae_sd) vae = VAE(sd=vae_sd) if output_clip: clip_target = model_config.clip_target(state_dict=sd) if clip_target is not None: clip_sd = model_config.process_clip_state_dict(sd) if len(clip_sd) > 0: clip = CLIP(clip_target, embedding_directory=embedding_directory) m, u = clip.load_sd(clip_sd, full_model=True) if len(m) > 0: m_filter = list(filter(lambda a: ".logit_scale" not in a and ".transformer.text_projection.weight" not in a, m)) if len(m_filter) > 0: logging.warning("clip missing: {}".format(m)) else: logging.debug("clip missing: {}".format(m)) if len(u) > 0: logging.debug("clip unexpected {}:".format(u)) else: logging.warning("no CLIP/text encoder weights in checkpoint, the text encoder model will not be loaded.") left_over = sd.keys() if len(left_over) > 0: logging.debug("left over keys: {}".format(left_over)) if output_model: model_patcher = comfy.model_patcher.ModelPatcher(model, load_device=load_device, offload_device=model_management.unet_offload_device(), current_device=inital_load_device) if inital_load_device != torch.device("cpu"): logging.info("loaded straight to GPU") model_management.load_model_gpu(model_patcher) return (model_patcher, clip, vae, clipvision) def load_unet_state_dict(sd): parameters = comfy.utils.calculate_parameters(sd) unet_dtype = model_management.unet_dtype(model_params=parameters) load_device = model_management.get_torch_device() if 'transformer_blocks.0.attn.add_q_proj.weight' in sd: new_sd = model_detection.convert_diffusers_mmdit(sd, "") if new_sd is None: return None model_config = model_detection.model_config_from_unet(new_sd, "") if model_config is None: return None elif "input_blocks.0.0.weight" in sd or 'clf.1.weight' in sd: model_config = model_detection.model_config_from_unet(sd, "") if model_config is None: return None new_sd = sd else: model_config = model_detection.model_config_from_diffusers_unet(sd) if model_config is None: return None diffusers_keys = comfy.utils.unet_to_diffusers(model_config.unet_config) new_sd = {} for k in diffusers_keys: if k in sd: new_sd[diffusers_keys[k]] = sd.pop(k) else: logging.warning("{} {}".format(diffusers_keys[k], k)) offload_device = model_management.unet_offload_device() unet_dtype = model_management.unet_dtype(model_params=parameters, supported_dtypes=model_config.supported_inference_dtypes) manual_cast_dtype = model_management.unet_manual_cast(unet_dtype, load_device, model_config.supported_inference_dtypes) model_config.set_inference_dtype(unet_dtype, manual_cast_dtype) model = model_config.get_model(new_sd, "") model = model.to(offload_device) model.load_model_weights(new_sd, "") left_over = sd.keys() if len(left_over) > 0: logging.info("left over keys in unet: {}".format(left_over)) return comfy.model_patcher.ModelPatcher(model, load_device=load_device, offload_device=offload_device) def load_unet(unet_path): sd = comfy.utils.load_torch_file(unet_path) model = load_unet_state_dict(sd) if model is None: logging.error("ERROR UNSUPPORTED UNET {}".format(unet_path)) raise RuntimeError("ERROR: Could not detect model type of: {}".format(unet_path)) return model def save_checkpoint(output_path, model, clip=None, vae=None, clip_vision=None, metadata=None, extra_keys={}): clip_sd = None load_models = [model] if clip is not None: load_models.append(clip.load_model()) clip_sd = clip.get_sd() model_management.load_models_gpu(load_models, force_patch_weights=True) clip_vision_sd = clip_vision.get_sd() if clip_vision is not None else None sd = 
model.model.state_dict_for_saving(clip_sd, vae.get_sd(), clip_vision_sd) for k in extra_keys: sd[k] = extra_keys[k] comfy.utils.save_torch_file(sd, output_path, metadata=metadata)
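# --- Illustrative usage sketch (not part of comfy/sd.py) ---
# A minimal example of how the loaders defined above are typically combined: load a
# checkpoint, encode a prompt with the returned CLIP wrapper, and round-trip an image
# through the VAE. The checkpoint path below is a placeholder, not a real file shipped
# with the repository.
import torch
import comfy.sd

def quick_smoke_test(ckpt_path="checkpoints/example_model.safetensors"):
    model_patcher, clip, vae, _ = comfy.sd.load_checkpoint_guess_config(
        ckpt_path, output_vae=True, output_clip=True)

    # Text conditioning: tokenize, then encode, optionally keeping the pooled output.
    tokens = clip.tokenize("a photo of a cat")
    cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True)

    # VAE round trip on a dummy image batch (NHWC float in [0, 1]).
    image = torch.rand((1, 512, 512, 3))
    latent = vae.encode(image)    # NCHW latent, spatially reduced by vae.downscale_ratio
    decoded = vae.decode(latent)  # back to NHWC pixels on the intermediate device
    return cond.shape, decoded.shape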
import os from transformers import CLIPTokenizer import comfy.ops import torch import traceback import zipfile from . import model_management import comfy.clip_model import json import logging import numbers def gen_empty_tokens(special_tokens, length): start_token = special_tokens.get("start", None) end_token = special_tokens.get("end", None) pad_token = special_tokens.get("pad") output = [] if start_token is not None: output.append(start_token) if end_token is not None: output.append(end_token) output += [pad_token] * (length - len(output)) return output class ClipTokenWeightEncoder: def encode_token_weights(self, token_weight_pairs): to_encode = list() max_token_len = 0 has_weights = False for x in token_weight_pairs: tokens = list(map(lambda a: a[0], x)) max_token_len = max(len(tokens), max_token_len) has_weights = has_weights or not all(map(lambda a: a[1] == 1.0, x)) to_encode.append(tokens) sections = len(to_encode) if has_weights or sections == 0: to_encode.append(gen_empty_tokens(self.special_tokens, max_token_len)) out, pooled = self.encode(to_encode) if pooled is not None: first_pooled = pooled[0:1].to(model_management.intermediate_device()) else: first_pooled = pooled output = [] for k in range(0, sections): z = out[k:k+1] if has_weights: z_empty = out[-1] for i in range(len(z)): for j in range(len(z[i])): weight = token_weight_pairs[k][j][1] if weight != 1.0: z[i][j] = (z[i][j] - z_empty[j]) * weight + z_empty[j] output.append(z) if (len(output) == 0): return out[-1:].to(model_management.intermediate_device()), first_pooled return torch.cat(output, dim=-2).to(model_management.intermediate_device()), first_pooled class SDClipModel(torch.nn.Module, ClipTokenWeightEncoder): """Uses the CLIP transformer encoder for text (from huggingface)""" LAYERS = [ "last", "pooled", "hidden" ] def __init__(self, version="openai/clip-vit-large-patch14", device="cpu", max_length=77, freeze=True, layer="last", layer_idx=None, textmodel_json_config=None, dtype=None, model_class=comfy.clip_model.CLIPTextModel, special_tokens={"start": 49406, "end": 49407, "pad": 49407}, layer_norm_hidden_state=True, enable_attention_masks=False, zero_out_masked=False, return_projected_pooled=True): super().__init__() assert layer in self.LAYERS if textmodel_json_config is None: textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sd1_clip_config.json") with open(textmodel_json_config) as f: config = json.load(f) self.transformer = model_class(config, dtype, device, comfy.ops.manual_cast) self.num_layers = self.transformer.num_layers self.max_length = max_length if freeze: self.freeze() self.layer = layer self.layer_idx = None self.special_tokens = special_tokens self.logit_scale = torch.nn.Parameter(torch.tensor(4.6055)) self.enable_attention_masks = enable_attention_masks self.zero_out_masked = zero_out_masked self.layer_norm_hidden_state = layer_norm_hidden_state self.return_projected_pooled = return_projected_pooled if layer == "hidden": assert layer_idx is not None assert abs(layer_idx) < self.num_layers self.set_clip_options({"layer": layer_idx}) self.options_default = (self.layer, self.layer_idx, self.return_projected_pooled) def freeze(self): self.transformer = self.transformer.eval() for param in self.parameters(): param.requires_grad = False def set_clip_options(self, options): layer_idx = options.get("layer", self.layer_idx) self.return_projected_pooled = options.get("projected_pooled", self.return_projected_pooled) if layer_idx is None or abs(layer_idx) > self.num_layers: 
self.layer = "last" else: self.layer = "hidden" self.layer_idx = layer_idx def reset_clip_options(self): self.layer = self.options_default[0] self.layer_idx = self.options_default[1] self.return_projected_pooled = self.options_default[2] def set_up_textual_embeddings(self, tokens, current_embeds): out_tokens = [] next_new_token = token_dict_size = current_embeds.weight.shape[0] - 1 embedding_weights = [] for x in tokens: tokens_temp = [] for y in x: if isinstance(y, numbers.Integral): if y == token_dict_size: y = -1 tokens_temp += [int(y)] else: if y.shape[0] == current_embeds.weight.shape[1]: embedding_weights += [y] tokens_temp += [next_new_token] next_new_token += 1 else: logging.warning("WARNING: shape mismatch when trying to apply embedding, embedding will be ignored {} != {}".format(y.shape[0], current_embeds.weight.shape[1])) while len(tokens_temp) < len(x): tokens_temp += [self.special_tokens["pad"]] out_tokens += [tokens_temp] n = token_dict_size if len(embedding_weights) > 0: new_embedding = torch.nn.Embedding(next_new_token + 1, current_embeds.weight.shape[1], device=current_embeds.weight.device, dtype=current_embeds.weight.dtype) new_embedding.weight[:token_dict_size] = current_embeds.weight[:-1] for x in embedding_weights: new_embedding.weight[n] = x n += 1 new_embedding.weight[n] = current_embeds.weight[-1] self.transformer.set_input_embeddings(new_embedding) processed_tokens = [] for x in out_tokens: processed_tokens += [list(map(lambda a: n if a == -1 else a, x))] return processed_tokens def forward(self, tokens): backup_embeds = self.transformer.get_input_embeddings() device = backup_embeds.weight.device tokens = self.set_up_textual_embeddings(tokens, backup_embeds) tokens = torch.LongTensor(tokens).to(device) attention_mask = None if self.enable_attention_masks: attention_mask = torch.zeros_like(tokens) end_token = self.special_tokens.get("end", -1) for x in range(attention_mask.shape[0]): for y in range(attention_mask.shape[1]): attention_mask[x, y] = 1 if tokens[x, y] == end_token: break outputs = self.transformer(tokens, attention_mask, intermediate_output=self.layer_idx, final_layer_norm_intermediate=self.layer_norm_hidden_state) self.transformer.set_input_embeddings(backup_embeds) if self.layer == "last": z = outputs[0].float() else: z = outputs[1].float() if self.zero_out_masked and attention_mask is not None: z *= attention_mask.unsqueeze(-1).float() pooled_output = None if len(outputs) >= 3: if not self.return_projected_pooled and len(outputs) >= 4 and outputs[3] is not None: pooled_output = outputs[3].float() elif outputs[2] is not None: pooled_output = outputs[2].float() return z, pooled_output def encode(self, tokens): return self(tokens) def load_sd(self, sd): return self.transformer.load_state_dict(sd, strict=False) def parse_parentheses(string): result = [] current_item = "" nesting_level = 0 for char in string: if char == "(": if nesting_level == 0: if current_item: result.append(current_item) current_item = "(" else: current_item = "(" else: current_item += char nesting_level += 1 elif char == ")": nesting_level -= 1 if nesting_level == 0: result.append(current_item + ")") current_item = "" else: current_item += char else: current_item += char if current_item: result.append(current_item) return result def token_weights(string, current_weight): a = parse_parentheses(string) out = [] for x in a: weight = current_weight if len(x) >= 2 and x[-1] == ')' and x[0] == '(': x = x[1:-1] xx = x.rfind(":") weight *= 1.1 if xx > 0: try: weight = float(x[xx+1:]) x = 
x[:xx] except: pass out += token_weights(x, weight) else: out += [(x, current_weight)] return out def escape_important(text): text = text.replace("\\)", "\0\1") text = text.replace("\\(", "\0\2") return text def unescape_important(text): text = text.replace("\0\1", ")") text = text.replace("\0\2", "(") return text def safe_load_embed_zip(embed_path): with zipfile.ZipFile(embed_path) as myzip: names = list(filter(lambda a: "data/" in a, myzip.namelist())) names.reverse() for n in names: with myzip.open(n) as myfile: data = myfile.read() number = len(data) // 4 length_embed = 1024 if number < 768: continue if number % 768 == 0: length_embed = 768 num_embeds = number // length_embed embed = torch.frombuffer(data, dtype=torch.float) out = embed.reshape((num_embeds, length_embed)).clone() del embed return out def expand_directory_list(directories): dirs = set() for x in directories: dirs.add(x) for root, subdir, file in os.walk(x, followlinks=True): dirs.add(root) return list(dirs) def load_embed(embedding_name, embedding_directory, embedding_size, embed_key=None): if isinstance(embedding_directory, str): embedding_directory = [embedding_directory] embedding_directory = expand_directory_list(embedding_directory) valid_file = None for embed_dir in embedding_directory: embed_path = os.path.abspath(os.path.join(embed_dir, embedding_name)) embed_dir = os.path.abspath(embed_dir) try: if os.path.commonpath((embed_dir, embed_path)) != embed_dir: continue except: continue if not os.path.isfile(embed_path): extensions = ['.safetensors', '.pt', '.bin'] for x in extensions: t = embed_path + x if os.path.isfile(t): valid_file = t break else: valid_file = embed_path if valid_file is not None: break if valid_file is None: return None embed_path = valid_file embed_out = None try: if embed_path.lower().endswith(".safetensors"): import safetensors.torch embed = safetensors.torch.load_file(embed_path, device="cpu") else: if 'weights_only' in torch.load.__code__.co_varnames: try: embed = torch.load(embed_path, weights_only=True, map_location="cpu") except: embed_out = safe_load_embed_zip(embed_path) else: embed = torch.load(embed_path, map_location="cpu") except Exception as e: logging.warning("{}\n\nerror loading embedding, skipping loading: {}".format(traceback.format_exc(), embedding_name)) return None if embed_out is None: if 'string_to_param' in embed: values = embed['string_to_param'].values() embed_out = next(iter(values)) elif isinstance(embed, list): out_list = [] for x in range(len(embed)): for k in embed[x]: t = embed[x][k] if t.shape[-1] != embedding_size: continue out_list.append(t.reshape(-1, t.shape[-1])) embed_out = torch.cat(out_list, dim=0) elif embed_key is not None and embed_key in embed: embed_out = embed[embed_key] else: values = embed.values() embed_out = next(iter(values)) return embed_out class SDTokenizer: def __init__(self, tokenizer_path=None, max_length=77, pad_with_end=True, embedding_directory=None, embedding_size=768, embedding_key='clip_l', tokenizer_class=CLIPTokenizer, has_start_token=True, pad_to_max_length=True, min_length=None): if tokenizer_path is None: tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sd1_tokenizer") self.tokenizer = tokenizer_class.from_pretrained(tokenizer_path) self.max_length = max_length self.min_length = min_length empty = self.tokenizer('')["input_ids"] if has_start_token: self.tokens_start = 1 self.start_token = empty[0] self.end_token = empty[1] else: self.tokens_start = 0 self.start_token = None self.end_token = empty[0] self.pad_with_end = 
pad_with_end self.pad_to_max_length = pad_to_max_length vocab = self.tokenizer.get_vocab() self.inv_vocab = {v: k for k, v in vocab.items()} self.embedding_directory = embedding_directory self.max_word_length = 8 self.embedding_identifier = "embedding:" self.embedding_size = embedding_size self.embedding_key = embedding_key def _try_get_embedding(self, embedding_name:str): ''' Takes a potential embedding name and tries to retrieve it. Returns a Tuple consisting of the embedding and any leftover string, embedding can be None. ''' embed = load_embed(embedding_name, self.embedding_directory, self.embedding_size, self.embedding_key) if embed is None: stripped = embedding_name.strip(',') if len(stripped) < len(embedding_name): embed = load_embed(stripped, self.embedding_directory, self.embedding_size, self.embedding_key) return (embed, embedding_name[len(stripped):]) return (embed, "") def tokenize_with_weights(self, text:str, return_word_ids=False): ''' Takes a prompt and converts it to a list of (token, weight, word id) elements. Tokens can both be integer tokens and pre computed CLIP tensors. Word id values are unique per word and embedding, where the id 0 is reserved for non word tokens. Returned list has the dimensions NxM where M is the input size of CLIP ''' if self.pad_with_end: pad_token = self.end_token else: pad_token = 0 text = escape_important(text) parsed_weights = token_weights(text, 1.0) tokens = [] for weighted_segment, weight in parsed_weights: to_tokenize = unescape_important(weighted_segment).replace("\n", " ").split(' ') to_tokenize = [x for x in to_tokenize if x != ""] for word in to_tokenize: if word.startswith(self.embedding_identifier) and self.embedding_directory is not None: embedding_name = word[len(self.embedding_identifier):].strip('\n') embed, leftover = self._try_get_embedding(embedding_name) if embed is None: logging.warning(f"warning, embedding:{embedding_name} does not exist, ignoring") else: if len(embed.shape) == 1: tokens.append([(embed, weight)]) else: tokens.append([(embed[x], weight) for x in range(embed.shape[0])]) if leftover != "": word = leftover else: continue tokens.append([(t, weight) for t in self.tokenizer(word)["input_ids"][self.tokens_start:-1]]) batched_tokens = [] batch = [] if self.start_token is not None: batch.append((self.start_token, 1.0, 0)) batched_tokens.append(batch) for i, t_group in enumerate(tokens): is_large = len(t_group) >= self.max_word_length while len(t_group) > 0: if len(t_group) + len(batch) > self.max_length - 1: remaining_length = self.max_length - len(batch) - 1 if is_large: batch.extend([(t,w,i+1) for t,w in t_group[:remaining_length]]) batch.append((self.end_token, 1.0, 0)) t_group = t_group[remaining_length:] else: batch.append((self.end_token, 1.0, 0)) if self.pad_to_max_length: batch.extend([(pad_token, 1.0, 0)] * (remaining_length)) batch = [] if self.start_token is not None: batch.append((self.start_token, 1.0, 0)) batched_tokens.append(batch) else: batch.extend([(t,w,i+1) for t,w in t_group]) t_group = [] batch.append((self.end_token, 1.0, 0)) if self.pad_to_max_length: batch.extend([(pad_token, 1.0, 0)] * (self.max_length - len(batch))) if self.min_length is not None and len(batch) < self.min_length: batch.extend([(pad_token, 1.0, 0)] * (self.min_length - len(batch))) if not return_word_ids: batched_tokens = [[(t, w) for t, w,_ in x] for x in batched_tokens] return batched_tokens def untokenize(self, token_weight_pair): return list(map(lambda a: (a, self.inv_vocab[a[0]]), token_weight_pair)) class 
SD1Tokenizer: def __init__(self, embedding_directory=None, clip_name="l", tokenizer=SDTokenizer): self.clip_name = clip_name self.clip = "clip_{}".format(self.clip_name) setattr(self, self.clip, tokenizer(embedding_directory=embedding_directory)) def tokenize_with_weights(self, text:str, return_word_ids=False): out = {} out[self.clip_name] = getattr(self, self.clip).tokenize_with_weights(text, return_word_ids) return out def untokenize(self, token_weight_pair): return getattr(self, self.clip).untokenize(token_weight_pair) class SD1ClipModel(torch.nn.Module): def __init__(self, device="cpu", dtype=None, clip_name="l", clip_model=SDClipModel, **kwargs): super().__init__() self.clip_name = clip_name self.clip = "clip_{}".format(self.clip_name) setattr(self, self.clip, clip_model(device=device, dtype=dtype, **kwargs)) self.dtypes = set() if dtype is not None: self.dtypes.add(dtype) def set_clip_options(self, options): getattr(self, self.clip).set_clip_options(options) def reset_clip_options(self): getattr(self, self.clip).reset_clip_options() def encode_token_weights(self, token_weight_pairs): token_weight_pairs = token_weight_pairs[self.clip_name] out, pooled = getattr(self, self.clip).encode_token_weights(token_weight_pairs) return out, pooled def load_sd(self, sd): return getattr(self, self.clip).load_sd(sd)
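# --- Illustrative usage sketch (not part of comfy/sd1_clip.py) ---
# The helpers above implement the "(text:1.2)" prompt-weight syntax: parse_parentheses
# splits a prompt into nested groups, token_weights assigns a multiplier per group
# (x1.1 per nesting level unless an explicit ":weight" is given), and SDTokenizer packs
# the weighted tokens into 77-token batches with start/end/pad tokens and word ids.
# A rough sketch, assuming the bundled sd1_tokenizer files and transformers are available:
from comfy.sd1_clip import SDTokenizer, escape_important, token_weights

text = "a (red:1.3) ((fox)) in the snow"
print(token_weights(escape_important(text), 1.0))
# roughly: [('a ', 1.0), ('red', 1.3), (' ', 1.0), ('fox', 1.2100000000000002), (' in the snow', 1.0)]
# (the 1.21 comes from 1.1 * 1.1 for the doubled parentheses)

tokenizer = SDTokenizer()  # loads the bundled sd1_tokenizer
batches = tokenizer.tokenize_with_weights(text, return_word_ids=True)
# batches is a list of length-77 lists of (token_id_or_embedding, weight, word_id) tuples,
# which is exactly what ClipTokenWeightEncoder.encode_token_weights consumes.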
{ "_name_or_path": "openai/clip-vit-large-patch14", "architectures": [ "CLIPTextModel" ], "attention_dropout": 0.0, "bos_token_id": 0, "dropout": 0.0, "eos_token_id": 2, "hidden_act": "quick_gelu", "hidden_size": 768, "initializer_factor": 1.0, "initializer_range": 0.02, "intermediate_size": 3072, "layer_norm_eps": 1e-05, "max_position_embeddings": 77, "model_type": "clip_text_model", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 1, "projection_dim": 768, "torch_dtype": "float32", "transformers_version": "4.24.0", "vocab_size": 49408 }
from comfy import sd1_clip import os class SD2ClipHModel(sd1_clip.SDClipModel): def __init__(self, arch="ViT-H-14", device="cpu", max_length=77, freeze=True, layer="penultimate", layer_idx=None, dtype=None): if layer == "penultimate": layer="hidden" layer_idx=-2 textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sd2_clip_config.json") super().__init__(device=device, freeze=freeze, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, special_tokens={"start": 49406, "end": 49407, "pad": 0}) class SD2ClipHTokenizer(sd1_clip.SDTokenizer): def __init__(self, tokenizer_path=None, embedding_directory=None): super().__init__(tokenizer_path, pad_with_end=False, embedding_directory=embedding_directory, embedding_size=1024) class SD2Tokenizer(sd1_clip.SD1Tokenizer): def __init__(self, embedding_directory=None): super().__init__(embedding_directory=embedding_directory, clip_name="h", tokenizer=SD2ClipHTokenizer) class SD2ClipModel(sd1_clip.SD1ClipModel): def __init__(self, device="cpu", dtype=None, **kwargs): super().__init__(device=device, dtype=dtype, clip_name="h", clip_model=SD2ClipHModel, **kwargs)
{ "architectures": [ "CLIPTextModel" ], "attention_dropout": 0.0, "bos_token_id": 0, "dropout": 0.0, "eos_token_id": 2, "hidden_act": "gelu", "hidden_size": 1024, "initializer_factor": 1.0, "initializer_range": 0.02, "intermediate_size": 4096, "layer_norm_eps": 1e-05, "max_position_embeddings": 77, "model_type": "clip_text_model", "num_attention_heads": 16, "num_hidden_layers": 24, "pad_token_id": 1, "projection_dim": 1024, "torch_dtype": "float32", "vocab_size": 49408 }
from comfy import sd1_clip from comfy import sdxl_clip from transformers import T5TokenizerFast import comfy.t5 import torch import os import comfy.model_management import logging class T5XXLModel(sd1_clip.SDClipModel): def __init__(self, device="cpu", layer="last", layer_idx=None, dtype=None): textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_config_xxl.json") super().__init__(device=device, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, special_tokens={"end": 1, "pad": 0}, model_class=comfy.t5.T5) class T5XXLTokenizer(sd1_clip.SDTokenizer): def __init__(self, embedding_directory=None): tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_tokenizer") super().__init__(tokenizer_path, pad_with_end=False, embedding_size=4096, embedding_key='t5xxl', tokenizer_class=T5TokenizerFast, has_start_token=False, pad_to_max_length=False, max_length=99999999, min_length=77) class SDT5XXLTokenizer(sd1_clip.SD1Tokenizer): def __init__(self, embedding_directory=None): super().__init__(embedding_directory=embedding_directory, clip_name="t5xxl", tokenizer=T5XXLTokenizer) class SDT5XXLModel(sd1_clip.SD1ClipModel): def __init__(self, device="cpu", dtype=None, **kwargs): super().__init__(device=device, dtype=dtype, clip_name="t5xxl", clip_model=T5XXLModel, **kwargs) class SD3Tokenizer: def __init__(self, embedding_directory=None): self.clip_l = sd1_clip.SDTokenizer(embedding_directory=embedding_directory) self.clip_g = sdxl_clip.SDXLClipGTokenizer(embedding_directory=embedding_directory) self.t5xxl = T5XXLTokenizer(embedding_directory=embedding_directory) def tokenize_with_weights(self, text:str, return_word_ids=False): out = {} out["g"] = self.clip_g.tokenize_with_weights(text, return_word_ids) out["l"] = self.clip_l.tokenize_with_weights(text, return_word_ids) out["t5xxl"] = self.t5xxl.tokenize_with_weights(text, return_word_ids) return out def untokenize(self, token_weight_pair): return self.clip_g.untokenize(token_weight_pair) class SD3ClipModel(torch.nn.Module): def __init__(self, clip_l=True, clip_g=True, t5=True, dtype_t5=None, device="cpu", dtype=None): super().__init__() self.dtypes = set() if clip_l: self.clip_l = sd1_clip.SDClipModel(layer="hidden", layer_idx=-2, device=device, dtype=dtype, layer_norm_hidden_state=False, return_projected_pooled=False) self.dtypes.add(dtype) else: self.clip_l = None if clip_g: self.clip_g = sdxl_clip.SDXLClipG(device=device, dtype=dtype) self.dtypes.add(dtype) else: self.clip_g = None if t5: if dtype_t5 is None: dtype_t5 = dtype elif comfy.model_management.dtype_size(dtype_t5) > comfy.model_management.dtype_size(dtype): dtype_t5 = dtype if not comfy.model_management.supports_cast(device, dtype_t5): dtype_t5 = dtype self.t5xxl = T5XXLModel(device=device, dtype=dtype_t5) self.dtypes.add(dtype_t5) else: self.t5xxl = None logging.debug("Created SD3 text encoder with: clip_l {}, clip_g {}, t5xxl {}:{}".format(clip_l, clip_g, t5, dtype_t5)) def set_clip_options(self, options): if self.clip_l is not None: self.clip_l.set_clip_options(options) if self.clip_g is not None: self.clip_g.set_clip_options(options) if self.t5xxl is not None: self.t5xxl.set_clip_options(options) def reset_clip_options(self): if self.clip_l is not None: self.clip_l.reset_clip_options() if self.clip_g is not None: self.clip_g.reset_clip_options() if self.t5xxl is not None: self.t5xxl.reset_clip_options() def encode_token_weights(self, token_weight_pairs): token_weight_pairs_l = 
token_weight_pairs["l"] token_weight_pairs_g = token_weight_pairs["g"] token_weight_pars_t5 = token_weight_pairs["t5xxl"] lg_out = None pooled = None out = None if len(token_weight_pairs_g) > 0 or len(token_weight_pairs_l) > 0: if self.clip_l is not None: lg_out, l_pooled = self.clip_l.encode_token_weights(token_weight_pairs_l) else: l_pooled = torch.zeros((1, 768), device=comfy.model_management.intermediate_device()) if self.clip_g is not None: g_out, g_pooled = self.clip_g.encode_token_weights(token_weight_pairs_g) if lg_out is not None: lg_out = torch.cat([lg_out, g_out], dim=-1) else: lg_out = torch.nn.functional.pad(g_out, (768, 0)) else: g_out = None g_pooled = torch.zeros((1, 1280), device=comfy.model_management.intermediate_device()) if lg_out is not None: lg_out = torch.nn.functional.pad(lg_out, (0, 4096 - lg_out.shape[-1])) out = lg_out pooled = torch.cat((l_pooled, g_pooled), dim=-1) if self.t5xxl is not None: t5_out, t5_pooled = self.t5xxl.encode_token_weights(token_weight_pars_t5) if lg_out is not None: out = torch.cat([lg_out, t5_out], dim=-2) else: out = t5_out if out is None: out = torch.zeros((1, 77, 4096), device=comfy.model_management.intermediate_device()) if pooled is None: pooled = torch.zeros((1, 768 + 1280), device=comfy.model_management.intermediate_device()) return out, pooled def load_sd(self, sd): if "text_model.encoder.layers.30.mlp.fc1.weight" in sd: return self.clip_g.load_sd(sd) elif "text_model.encoder.layers.1.mlp.fc1.weight" in sd: return self.clip_l.load_sd(sd) else: return self.t5xxl.load_sd(sd) def sd3_clip(clip_l=True, clip_g=True, t5=True, dtype_t5=None): class SD3ClipModel_(SD3ClipModel): def __init__(self, device="cpu", dtype=None): super().__init__(clip_l=clip_l, clip_g=clip_g, t5=t5, dtype_t5=dtype_t5, device=device, dtype=dtype) return SD3ClipModel_
from comfy import sd1_clip import torch import os class SDXLClipG(sd1_clip.SDClipModel): def __init__(self, device="cpu", max_length=77, freeze=True, layer="penultimate", layer_idx=None, dtype=None): if layer == "penultimate": layer="hidden" layer_idx=-2 textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_config_bigg.json") super().__init__(device=device, freeze=freeze, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, special_tokens={"start": 49406, "end": 49407, "pad": 0}, layer_norm_hidden_state=False) def load_sd(self, sd): return super().load_sd(sd) class SDXLClipGTokenizer(sd1_clip.SDTokenizer): def __init__(self, tokenizer_path=None, embedding_directory=None): super().__init__(tokenizer_path, pad_with_end=False, embedding_directory=embedding_directory, embedding_size=1280, embedding_key='clip_g') class SDXLTokenizer: def __init__(self, embedding_directory=None): self.clip_l = sd1_clip.SDTokenizer(embedding_directory=embedding_directory) self.clip_g = SDXLClipGTokenizer(embedding_directory=embedding_directory) def tokenize_with_weights(self, text:str, return_word_ids=False): out = {} out["g"] = self.clip_g.tokenize_with_weights(text, return_word_ids) out["l"] = self.clip_l.tokenize_with_weights(text, return_word_ids) return out def untokenize(self, token_weight_pair): return self.clip_g.untokenize(token_weight_pair) class SDXLClipModel(torch.nn.Module): def __init__(self, device="cpu", dtype=None): super().__init__() self.clip_l = sd1_clip.SDClipModel(layer="hidden", layer_idx=-2, device=device, dtype=dtype, layer_norm_hidden_state=False) self.clip_g = SDXLClipG(device=device, dtype=dtype) self.dtypes = set([dtype]) def set_clip_options(self, options): self.clip_l.set_clip_options(options) self.clip_g.set_clip_options(options) def reset_clip_options(self): self.clip_g.reset_clip_options() self.clip_l.reset_clip_options() def encode_token_weights(self, token_weight_pairs): token_weight_pairs_g = token_weight_pairs["g"] token_weight_pairs_l = token_weight_pairs["l"] g_out, g_pooled = self.clip_g.encode_token_weights(token_weight_pairs_g) l_out, l_pooled = self.clip_l.encode_token_weights(token_weight_pairs_l) return torch.cat([l_out, g_out], dim=-1), g_pooled def load_sd(self, sd): if "text_model.encoder.layers.30.mlp.fc1.weight" in sd: return self.clip_g.load_sd(sd) else: return self.clip_l.load_sd(sd) class SDXLRefinerClipModel(sd1_clip.SD1ClipModel): def __init__(self, device="cpu", dtype=None): super().__init__(device=device, dtype=dtype, clip_name="g", clip_model=SDXLClipG) class StableCascadeClipGTokenizer(sd1_clip.SDTokenizer): def __init__(self, tokenizer_path=None, embedding_directory=None): super().__init__(tokenizer_path, pad_with_end=True, embedding_directory=embedding_directory, embedding_size=1280, embedding_key='clip_g') class StableCascadeTokenizer(sd1_clip.SD1Tokenizer): def __init__(self, embedding_directory=None): super().__init__(embedding_directory=embedding_directory, clip_name="g", tokenizer=StableCascadeClipGTokenizer) class StableCascadeClipG(sd1_clip.SDClipModel): def __init__(self, device="cpu", max_length=77, freeze=True, layer="hidden", layer_idx=-1, dtype=None): textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_config_bigg.json") super().__init__(device=device, freeze=freeze, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, special_tokens={"start": 49406, "end": 49407, "pad": 49407}, 
layer_norm_hidden_state=False, enable_attention_masks=True) def load_sd(self, sd): return super().load_sd(sd) class StableCascadeClipModel(sd1_clip.SD1ClipModel): def __init__(self, device="cpu", dtype=None): super().__init__(device=device, dtype=dtype, clip_name="g", clip_model=StableCascadeClipG)
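# --- Illustrative sketch (not part of comfy/sdxl_clip.py) ---
# SDXL conditioning runs two text encoders in parallel: the tokenizer returns one token
# batch per encoder under the keys "g" and "l", and SDXLClipModel concatenates the
# 768-dim clip_l output with the 1280-dim clip_g output into 2048 channels, returning
# clip_g's pooled vector. Rough usage, assuming the bundled tokenizer files are present:
from comfy.sdxl_clip import SDXLTokenizer

tokenizer = SDXLTokenizer()
token_batches = tokenizer.tokenize_with_weights("an astronaut riding a horse")
# token_batches["l"] and token_batches["g"] are parallel lists of 77-token batches;
# SDXLClipModel.encode_token_weights consumes this dict and returns
# (torch.cat([l_out, g_out], dim=-1), g_pooled), i.e. a (batch, 77, 2048) tensor.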
import torch from . import model_base from . import utils from . import sd1_clip from . import sd2_clip from . import sdxl_clip from . import sd3_clip from . import sa_t5 from . import supported_models_base from . import latent_formats from . import diffusers_convert class SD15(supported_models_base.BASE): unet_config = { "context_dim": 768, "model_channels": 320, "use_linear_in_transformer": False, "adm_in_channels": None, "use_temporal_attention": False, } unet_extra_config = { "num_heads": 8, "num_head_channels": -1, } latent_format = latent_formats.SD15 def process_clip_state_dict(self, state_dict): k = list(state_dict.keys()) for x in k: if x.startswith("cond_stage_model.transformer.") and not x.startswith("cond_stage_model.transformer.text_model."): y = x.replace("cond_stage_model.transformer.", "cond_stage_model.transformer.text_model.") state_dict[y] = state_dict.pop(x) if 'cond_stage_model.transformer.text_model.embeddings.position_ids' in state_dict: ids = state_dict['cond_stage_model.transformer.text_model.embeddings.position_ids'] if ids.dtype == torch.float32: state_dict['cond_stage_model.transformer.text_model.embeddings.position_ids'] = ids.round() replace_prefix = {} replace_prefix["cond_stage_model."] = "clip_l." state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix, filter_keys=True) return state_dict def process_clip_state_dict_for_saving(self, state_dict): pop_keys = ["clip_l.transformer.text_projection.weight", "clip_l.logit_scale"] for p in pop_keys: if p in state_dict: state_dict.pop(p) replace_prefix = {"clip_l.": "cond_stage_model."} return utils.state_dict_prefix_replace(state_dict, replace_prefix) def clip_target(self, state_dict={}): return supported_models_base.ClipTarget(sd1_clip.SD1Tokenizer, sd1_clip.SD1ClipModel) class SD20(supported_models_base.BASE): unet_config = { "context_dim": 1024, "model_channels": 320, "use_linear_in_transformer": True, "adm_in_channels": None, "use_temporal_attention": False, } unet_extra_config = { "num_heads": -1, "num_head_channels": 64, "attn_precision": torch.float32, } latent_format = latent_formats.SD15 def model_type(self, state_dict, prefix=""): if self.unet_config["in_channels"] == 4: k = "{}output_blocks.11.1.transformer_blocks.0.norm1.bias".format(prefix) out = state_dict.get(k, None) if out is not None and torch.std(out, unbiased=False) > 0.09: return model_base.ModelType.V_PREDICTION return model_base.ModelType.EPS def process_clip_state_dict(self, state_dict): replace_prefix = {} replace_prefix["conditioner.embedders.0.model."] = "clip_h." replace_prefix["cond_stage_model.model."] = "clip_h." state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix, filter_keys=True) state_dict = utils.clip_text_transformers_convert(state_dict, "clip_h.", "clip_h.transformer.") return state_dict def process_clip_state_dict_for_saving(self, state_dict): replace_prefix = {} replace_prefix["clip_h"] = "cond_stage_model.model" state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix) state_dict = diffusers_convert.convert_text_enc_state_dict_v20(state_dict) return state_dict def clip_target(self, state_dict={}): return supported_models_base.ClipTarget(sd2_clip.SD2Tokenizer, sd2_clip.SD2ClipModel) class SD21UnclipL(SD20): unet_config = { "context_dim": 1024, "model_channels": 320, "use_linear_in_transformer": True, "adm_in_channels": 1536, "use_temporal_attention": False, } clip_vision_prefix = "embedder.model.visual." 
noise_aug_config = {"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 768} class SD21UnclipH(SD20): unet_config = { "context_dim": 1024, "model_channels": 320, "use_linear_in_transformer": True, "adm_in_channels": 2048, "use_temporal_attention": False, } clip_vision_prefix = "embedder.model.visual." noise_aug_config = {"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 1024} class SDXLRefiner(supported_models_base.BASE): unet_config = { "model_channels": 384, "use_linear_in_transformer": True, "context_dim": 1280, "adm_in_channels": 2560, "transformer_depth": [0, 0, 4, 4, 4, 4, 0, 0], "use_temporal_attention": False, } latent_format = latent_formats.SDXL def get_model(self, state_dict, prefix="", device=None): return model_base.SDXLRefiner(self, device=device) def process_clip_state_dict(self, state_dict): keys_to_replace = {} replace_prefix = {} replace_prefix["conditioner.embedders.0.model."] = "clip_g." state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix, filter_keys=True) state_dict = utils.clip_text_transformers_convert(state_dict, "clip_g.", "clip_g.transformer.") state_dict = utils.state_dict_key_replace(state_dict, keys_to_replace) return state_dict def process_clip_state_dict_for_saving(self, state_dict): replace_prefix = {} state_dict_g = diffusers_convert.convert_text_enc_state_dict_v20(state_dict, "clip_g") if "clip_g.transformer.text_model.embeddings.position_ids" in state_dict_g: state_dict_g.pop("clip_g.transformer.text_model.embeddings.position_ids") replace_prefix["clip_g"] = "conditioner.embedders.0.model" state_dict_g = utils.state_dict_prefix_replace(state_dict_g, replace_prefix) return state_dict_g def clip_target(self, state_dict={}): return supported_models_base.ClipTarget(sdxl_clip.SDXLTokenizer, sdxl_clip.SDXLRefinerClipModel) class SDXL(supported_models_base.BASE): unet_config = { "model_channels": 320, "use_linear_in_transformer": True, "transformer_depth": [0, 0, 2, 2, 10, 10], "context_dim": 2048, "adm_in_channels": 2816, "use_temporal_attention": False, } latent_format = latent_formats.SDXL def model_type(self, state_dict, prefix=""): if 'edm_mean' in state_dict and 'edm_std' in state_dict: self.latent_format = latent_formats.SDXL_Playground_2_5() self.sampling_settings["sigma_data"] = 0.5 self.sampling_settings["sigma_max"] = 80.0 self.sampling_settings["sigma_min"] = 0.002 return model_base.ModelType.EDM elif "edm_vpred.sigma_max" in state_dict: self.sampling_settings["sigma_max"] = float(state_dict["edm_vpred.sigma_max"].item()) if "edm_vpred.sigma_min" in state_dict: self.sampling_settings["sigma_min"] = float(state_dict["edm_vpred.sigma_min"].item()) return model_base.ModelType.V_PREDICTION_EDM elif "v_pred" in state_dict: return model_base.ModelType.V_PREDICTION else: return model_base.ModelType.EPS def get_model(self, state_dict, prefix="", device=None): out = model_base.SDXL(self, model_type=self.model_type(state_dict, prefix), device=device) if self.inpaint_model(): out.set_inpaint() return out def process_clip_state_dict(self, state_dict): keys_to_replace = {} replace_prefix = {} replace_prefix["conditioner.embedders.0.transformer.text_model"] = "clip_l.transformer.text_model" replace_prefix["conditioner.embedders.1.model."] = "clip_g." 
state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix, filter_keys=True) state_dict = utils.state_dict_key_replace(state_dict, keys_to_replace) state_dict = utils.clip_text_transformers_convert(state_dict, "clip_g.", "clip_g.transformer.") return state_dict def process_clip_state_dict_for_saving(self, state_dict): replace_prefix = {} keys_to_replace = {} state_dict_g = diffusers_convert.convert_text_enc_state_dict_v20(state_dict, "clip_g") for k in state_dict: if k.startswith("clip_l"): state_dict_g[k] = state_dict[k] state_dict_g["clip_l.transformer.text_model.embeddings.position_ids"] = torch.arange(77).expand((1, -1)) pop_keys = ["clip_l.transformer.text_projection.weight", "clip_l.logit_scale"] for p in pop_keys: if p in state_dict_g: state_dict_g.pop(p) replace_prefix["clip_g"] = "conditioner.embedders.1.model" replace_prefix["clip_l"] = "conditioner.embedders.0" state_dict_g = utils.state_dict_prefix_replace(state_dict_g, replace_prefix) return state_dict_g def clip_target(self, state_dict={}): return supported_models_base.ClipTarget(sdxl_clip.SDXLTokenizer, sdxl_clip.SDXLClipModel) class SSD1B(SDXL): unet_config = { "model_channels": 320, "use_linear_in_transformer": True, "transformer_depth": [0, 0, 2, 2, 4, 4], "context_dim": 2048, "adm_in_channels": 2816, "use_temporal_attention": False, } class Segmind_Vega(SDXL): unet_config = { "model_channels": 320, "use_linear_in_transformer": True, "transformer_depth": [0, 0, 1, 1, 2, 2], "context_dim": 2048, "adm_in_channels": 2816, "use_temporal_attention": False, } class KOALA_700M(SDXL): unet_config = { "model_channels": 320, "use_linear_in_transformer": True, "transformer_depth": [0, 2, 5], "context_dim": 2048, "adm_in_channels": 2816, "use_temporal_attention": False, } class KOALA_1B(SDXL): unet_config = { "model_channels": 320, "use_linear_in_transformer": True, "transformer_depth": [0, 2, 6], "context_dim": 2048, "adm_in_channels": 2816, "use_temporal_attention": False, } class SVD_img2vid(supported_models_base.BASE): unet_config = { "model_channels": 320, "in_channels": 8, "use_linear_in_transformer": True, "transformer_depth": [1, 1, 1, 1, 1, 1, 0, 0], "context_dim": 1024, "adm_in_channels": 768, "use_temporal_attention": True, "use_temporal_resblock": True } unet_extra_config = { "num_heads": -1, "num_head_channels": 64, "attn_precision": torch.float32, } clip_vision_prefix = "conditioner.embedders.0.open_clip.model.visual." 
latent_format = latent_formats.SD15 sampling_settings = {"sigma_max": 700.0, "sigma_min": 0.002} def get_model(self, state_dict, prefix="", device=None): out = model_base.SVD_img2vid(self, device=device) return out def clip_target(self, state_dict={}): return None class SV3D_u(SVD_img2vid): unet_config = { "model_channels": 320, "in_channels": 8, "use_linear_in_transformer": True, "transformer_depth": [1, 1, 1, 1, 1, 1, 0, 0], "context_dim": 1024, "adm_in_channels": 256, "use_temporal_attention": True, "use_temporal_resblock": True } vae_key_prefix = ["conditioner.embedders.1.encoder."] def get_model(self, state_dict, prefix="", device=None): out = model_base.SV3D_u(self, device=device) return out class SV3D_p(SV3D_u): unet_config = { "model_channels": 320, "in_channels": 8, "use_linear_in_transformer": True, "transformer_depth": [1, 1, 1, 1, 1, 1, 0, 0], "context_dim": 1024, "adm_in_channels": 1280, "use_temporal_attention": True, "use_temporal_resblock": True } def get_model(self, state_dict, prefix="", device=None): out = model_base.SV3D_p(self, device=device) return out class Stable_Zero123(supported_models_base.BASE): unet_config = { "context_dim": 768, "model_channels": 320, "use_linear_in_transformer": False, "adm_in_channels": None, "use_temporal_attention": False, "in_channels": 8, } unet_extra_config = { "num_heads": 8, "num_head_channels": -1, } required_keys = { "cc_projection.weight": None, "cc_projection.bias": None, } clip_vision_prefix = "cond_stage_model.model.visual." latent_format = latent_formats.SD15 def get_model(self, state_dict, prefix="", device=None): out = model_base.Stable_Zero123(self, device=device, cc_projection_weight=state_dict["cc_projection.weight"], cc_projection_bias=state_dict["cc_projection.bias"]) return out def clip_target(self, state_dict={}): return None class SD_X4Upscaler(SD20): unet_config = { "context_dim": 1024, "model_channels": 256, 'in_channels': 7, "use_linear_in_transformer": True, "adm_in_channels": None, "use_temporal_attention": False, } unet_extra_config = { "disable_self_attentions": [True, True, True, False], "num_classes": 1000, "num_heads": 8, "num_head_channels": -1, } latent_format = latent_formats.SD_X4 sampling_settings = { "linear_start": 0.0001, "linear_end": 0.02, } def get_model(self, state_dict, prefix="", device=None): out = model_base.SD_X4Upscaler(self, device=device) return out class Stable_Cascade_C(supported_models_base.BASE): unet_config = { "stable_cascade_stage": 'c', } unet_extra_config = {} latent_format = latent_formats.SC_Prior supported_inference_dtypes = [torch.bfloat16, torch.float32] sampling_settings = { "shift": 2.0, } vae_key_prefix = ["vae."] text_encoder_key_prefix = ["text_encoder."] clip_vision_prefix = "clip_l_vision." 
def process_unet_state_dict(self, state_dict): key_list = list(state_dict.keys()) for y in ["weight", "bias"]: suffix = "in_proj_{}".format(y) keys = filter(lambda a: a.endswith(suffix), key_list) for k_from in keys: weights = state_dict.pop(k_from) prefix = k_from[:-(len(suffix) + 1)] shape_from = weights.shape[0] // 3 for x in range(3): p = ["to_q", "to_k", "to_v"] k_to = "{}.{}.{}".format(prefix, p[x], y) state_dict[k_to] = weights[shape_from*x:shape_from*(x + 1)] return state_dict def process_clip_state_dict(self, state_dict): state_dict = utils.state_dict_prefix_replace(state_dict, {k: "" for k in self.text_encoder_key_prefix}, filter_keys=True) if "clip_g.text_projection" in state_dict: state_dict["clip_g.transformer.text_projection.weight"] = state_dict.pop("clip_g.text_projection").transpose(0, 1) return state_dict def get_model(self, state_dict, prefix="", device=None): out = model_base.StableCascade_C(self, device=device) return out def clip_target(self, state_dict={}): return supported_models_base.ClipTarget(sdxl_clip.StableCascadeTokenizer, sdxl_clip.StableCascadeClipModel) class Stable_Cascade_B(Stable_Cascade_C): unet_config = { "stable_cascade_stage": 'b', } unet_extra_config = {} latent_format = latent_formats.SC_B supported_inference_dtypes = [torch.float16, torch.bfloat16, torch.float32] sampling_settings = { "shift": 1.0, } clip_vision_prefix = None def get_model(self, state_dict, prefix="", device=None): out = model_base.StableCascade_B(self, device=device) return out class SD15_instructpix2pix(SD15): unet_config = { "context_dim": 768, "model_channels": 320, "use_linear_in_transformer": False, "adm_in_channels": None, "use_temporal_attention": False, "in_channels": 8, } def get_model(self, state_dict, prefix="", device=None): return model_base.SD15_instructpix2pix(self, device=device) class SDXL_instructpix2pix(SDXL): unet_config = { "model_channels": 320, "use_linear_in_transformer": True, "transformer_depth": [0, 0, 2, 2, 10, 10], "context_dim": 2048, "adm_in_channels": 2816, "use_temporal_attention": False, "in_channels": 8, } def get_model(self, state_dict, prefix="", device=None): return model_base.SDXL_instructpix2pix(self, model_type=self.model_type(state_dict, prefix), device=device) class SD3(supported_models_base.BASE): unet_config = { "in_channels": 16, "pos_embed_scaling_factor": None, } sampling_settings = { "shift": 3.0, } unet_extra_config = {} latent_format = latent_formats.SD3 text_encoder_key_prefix = ["text_encoders."] def get_model(self, state_dict, prefix="", device=None): out = model_base.SD3(self, device=device) return out def clip_target(self, state_dict={}): clip_l = False clip_g = False t5 = False dtype_t5 = None pref = self.text_encoder_key_prefix[0] if "{}clip_l.transformer.text_model.final_layer_norm.weight".format(pref) in state_dict: clip_l = True if "{}clip_g.transformer.text_model.final_layer_norm.weight".format(pref) in state_dict: clip_g = True t5_key = "{}t5xxl.transformer.encoder.final_layer_norm.weight".format(pref) if t5_key in state_dict: t5 = True dtype_t5 = state_dict[t5_key].dtype return supported_models_base.ClipTarget(sd3_clip.SD3Tokenizer, sd3_clip.sd3_clip(clip_l=clip_l, clip_g=clip_g, t5=t5, dtype_t5=dtype_t5)) class StableAudio(supported_models_base.BASE): unet_config = { "audio_model": "dit1.0", } sampling_settings = {"sigma_max": 500.0, "sigma_min": 0.03} unet_extra_config = {} latent_format = latent_formats.StableAudio1 text_encoder_key_prefix = ["text_encoders."] vae_key_prefix = ["pretransform.model."] def 
get_model(self, state_dict, prefix="", device=None): seconds_start_sd = utils.state_dict_prefix_replace(state_dict, {"conditioner.conditioners.seconds_start.": ""}, filter_keys=True) seconds_total_sd = utils.state_dict_prefix_replace(state_dict, {"conditioner.conditioners.seconds_total.": ""}, filter_keys=True) return model_base.StableAudio1(self, seconds_start_embedder_weights=seconds_start_sd, seconds_total_embedder_weights=seconds_total_sd, device=device) def process_unet_state_dict(self, state_dict): for k in list(state_dict.keys()): if k.endswith(".cross_attend_norm.beta") or k.endswith(".ff_norm.beta") or k.endswith(".pre_norm.beta"): state_dict.pop(k) return state_dict def clip_target(self, state_dict={}): return supported_models_base.ClipTarget(sa_t5.SAT5Tokenizer, sa_t5.SAT5Model) models = [Stable_Zero123, SD15_instructpix2pix, SD15, SD20, SD21UnclipL, SD21UnclipH, SDXL_instructpix2pix, SDXLRefiner, SDXL, SSD1B, KOALA_700M, KOALA_1B, Segmind_Vega, SD_X4Upscaler, Stable_Cascade_C, Stable_Cascade_B, SV3D_u, SV3D_p, SD3, StableAudio] models += [SVD_img2vid]
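As a rough illustration of how the registry assembled above is consumed, the sketch below returns the first config class whose class-level unet_config (and required_keys, when a state dict is supplied) match a detected unet config. The helper name is hypothetical; the real lookup lives in comfy.model_detection.

def find_model_config(unet_config, state_dict=None):
    # Walk the registry in order; BASE.matches compares every key in the
    # class-level unet_config and, when state_dict is given, required_keys.
    for model_config in models:
        if model_config.matches(unet_config, state_dict):
            return model_config(unet_config)
    return None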
import torch from . import model_base from . import utils from . import latent_formats class ClipTarget: def __init__(self, tokenizer, clip): self.clip = clip self.tokenizer = tokenizer self.params = {} class BASE: unet_config = {} unet_extra_config = { "num_heads": -1, "num_head_channels": 64, } required_keys = {} clip_prefix = [] clip_vision_prefix = None noise_aug_config = None sampling_settings = {} latent_format = latent_formats.LatentFormat vae_key_prefix = ["first_stage_model."] text_encoder_key_prefix = ["cond_stage_model."] supported_inference_dtypes = [torch.float16, torch.bfloat16, torch.float32] manual_cast_dtype = None @classmethod def matches(s, unet_config, state_dict=None): for k in s.unet_config: if k not in unet_config or s.unet_config[k] != unet_config[k]: return False if state_dict is not None: for k in s.required_keys: if k not in state_dict: return False return True def model_type(self, state_dict, prefix=""): return model_base.ModelType.EPS def inpaint_model(self): return self.unet_config["in_channels"] > 4 def __init__(self, unet_config): self.unet_config = unet_config.copy() self.sampling_settings = self.sampling_settings.copy() self.latent_format = self.latent_format() for x in self.unet_extra_config: self.unet_config[x] = self.unet_extra_config[x] def get_model(self, state_dict, prefix="", device=None): if self.noise_aug_config is not None: out = model_base.SD21UNCLIP(self, self.noise_aug_config, model_type=self.model_type(state_dict, prefix), device=device) else: out = model_base.BaseModel(self, model_type=self.model_type(state_dict, prefix), device=device) if self.inpaint_model(): out.set_inpaint() return out def process_clip_state_dict(self, state_dict): state_dict = utils.state_dict_prefix_replace(state_dict, {k: "" for k in self.text_encoder_key_prefix}, filter_keys=True) return state_dict def process_unet_state_dict(self, state_dict): return state_dict def process_vae_state_dict(self, state_dict): return state_dict def process_clip_state_dict_for_saving(self, state_dict): replace_prefix = {"": self.text_encoder_key_prefix[0]} return utils.state_dict_prefix_replace(state_dict, replace_prefix) def process_clip_vision_state_dict_for_saving(self, state_dict): replace_prefix = {} if self.clip_vision_prefix is not None: replace_prefix[""] = self.clip_vision_prefix return utils.state_dict_prefix_replace(state_dict, replace_prefix) def process_unet_state_dict_for_saving(self, state_dict): replace_prefix = {"": "model.diffusion_model."} return utils.state_dict_prefix_replace(state_dict, replace_prefix) def process_vae_state_dict_for_saving(self, state_dict): replace_prefix = {"": self.vae_key_prefix[0]} return utils.state_dict_prefix_replace(state_dict, replace_prefix) def set_inference_dtype(self, dtype, manual_cast_dtype): self.unet_config['dtype'] = dtype self.manual_cast_dtype = manual_cast_dtype
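A minimal sketch (with a hypothetical subclass) of the contract BASE defines above: a subclass only declares its unet_config and latent_format, and instantiation copies the detected config, merges unet_extra_config, and builds the latent format object.

class ToyModel(BASE):
    unet_config = {"context_dim": 768, "in_channels": 4}
    latent_format = latent_formats.SD15

detected = {"context_dim": 768, "in_channels": 4, "model_channels": 320}
if ToyModel.matches(detected):
    cfg = ToyModel(detected)       # copies unet_config, applies unet_extra_config
    print(cfg.inpaint_model())     # False, since in_channels is not > 4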
import torch import math from comfy.ldm.modules.attention import optimized_attention_for_device class T5LayerNorm(torch.nn.Module): def __init__(self, hidden_size, eps=1e-6, dtype=None, device=None, operations=None): super().__init__() self.weight = torch.nn.Parameter(torch.empty(hidden_size, dtype=dtype, device=device)) self.variance_epsilon = eps def forward(self, x): variance = x.pow(2).mean(-1, keepdim=True) x = x * torch.rsqrt(variance + self.variance_epsilon) return self.weight.to(device=x.device, dtype=x.dtype) * x class T5DenseActDense(torch.nn.Module): def __init__(self, model_dim, ff_dim, dtype, device, operations): super().__init__() self.wi = operations.Linear(model_dim, ff_dim, bias=False, dtype=dtype, device=device) self.wo = operations.Linear(ff_dim, model_dim, bias=False, dtype=dtype, device=device) def forward(self, x): x = torch.nn.functional.relu(self.wi(x)) x = self.wo(x) return x class T5DenseGatedActDense(torch.nn.Module): def __init__(self, model_dim, ff_dim, dtype, device, operations): super().__init__() self.wi_0 = operations.Linear(model_dim, ff_dim, bias=False, dtype=dtype, device=device) self.wi_1 = operations.Linear(model_dim, ff_dim, bias=False, dtype=dtype, device=device) self.wo = operations.Linear(ff_dim, model_dim, bias=False, dtype=dtype, device=device) def forward(self, x): hidden_gelu = torch.nn.functional.gelu(self.wi_0(x), approximate="tanh") hidden_linear = self.wi_1(x) x = hidden_gelu * hidden_linear x = self.wo(x) return x class T5LayerFF(torch.nn.Module): def __init__(self, model_dim, ff_dim, ff_activation, dtype, device, operations): super().__init__() if ff_activation == "gelu_pytorch_tanh": self.DenseReluDense = T5DenseGatedActDense(model_dim, ff_dim, dtype, device, operations) elif ff_activation == "relu": self.DenseReluDense = T5DenseActDense(model_dim, ff_dim, dtype, device, operations) self.layer_norm = T5LayerNorm(model_dim, dtype=dtype, device=device, operations=operations) def forward(self, x): forwarded_states = self.layer_norm(x) forwarded_states = self.DenseReluDense(forwarded_states) x += forwarded_states return x class T5Attention(torch.nn.Module): def __init__(self, model_dim, inner_dim, num_heads, relative_attention_bias, dtype, device, operations): super().__init__() self.q = operations.Linear(model_dim, inner_dim, bias=False, dtype=dtype, device=device) self.k = operations.Linear(model_dim, inner_dim, bias=False, dtype=dtype, device=device) self.v = operations.Linear(model_dim, inner_dim, bias=False, dtype=dtype, device=device) self.o = operations.Linear(inner_dim, model_dim, bias=False, dtype=dtype, device=device) self.num_heads = num_heads self.relative_attention_bias = None if relative_attention_bias: self.relative_attention_num_buckets = 32 self.relative_attention_max_distance = 128 self.relative_attention_bias = torch.nn.Embedding(self.relative_attention_num_buckets, self.num_heads, device=device) @staticmethod def _relative_position_bucket(relative_position, bidirectional=True, num_buckets=32, max_distance=128): """ Adapted from Mesh Tensorflow: https: Translate relative position to a bucket number for relative attention. The relative position is defined as memory_position - query_position, i.e. the distance in tokens from the attending position to the attended-to position. If bidirectional=False, then positive relative positions are invalid. We use smaller buckets for small absolute relative_position and larger buckets for larger absolute relative_positions. All relative positions >=max_distance map to the same bucket. 
All relative positions <=-max_distance map to the same bucket. This should allow for more graceful generalization to longer sequences than the model has been trained on Args: relative_position: an int32 Tensor bidirectional: a boolean - whether the attention is bidirectional num_buckets: an integer max_distance: an integer Returns: a Tensor with the same shape as relative_position, containing int32 values in the range [0, num_buckets) """ relative_buckets = 0 if bidirectional: num_buckets //= 2 relative_buckets += (relative_position > 0).to(torch.long) * num_buckets relative_position = torch.abs(relative_position) else: relative_position = -torch.min(relative_position, torch.zeros_like(relative_position)) max_exact = num_buckets // 2 is_small = relative_position < max_exact relative_position_if_large = max_exact + ( torch.log(relative_position.float() / max_exact) / math.log(max_distance / max_exact) * (num_buckets - max_exact) ).to(torch.long) relative_position_if_large = torch.min( relative_position_if_large, torch.full_like(relative_position_if_large, num_buckets - 1) ) relative_buckets += torch.where(is_small, relative_position, relative_position_if_large) return relative_buckets def compute_bias(self, query_length, key_length, device): """Compute binned relative position bias""" context_position = torch.arange(query_length, dtype=torch.long, device=device)[:, None] memory_position = torch.arange(key_length, dtype=torch.long, device=device)[None, :] relative_position = memory_position - context_position relative_position_bucket = self._relative_position_bucket( relative_position, bidirectional=True, num_buckets=self.relative_attention_num_buckets, max_distance=self.relative_attention_max_distance, ) values = self.relative_attention_bias(relative_position_bucket) values = values.permute([2, 0, 1]).unsqueeze(0) return values def forward(self, x, mask=None, past_bias=None, optimized_attention=None): q = self.q(x) k = self.k(x) v = self.v(x) if self.relative_attention_bias is not None: past_bias = self.compute_bias(x.shape[1], x.shape[1], x.device) if past_bias is not None: if mask is not None: mask = mask + past_bias else: mask = past_bias out = optimized_attention(q, k * ((k.shape[-1] / self.num_heads) ** 0.5), v, self.num_heads, mask) return self.o(out), past_bias class T5LayerSelfAttention(torch.nn.Module): def __init__(self, model_dim, inner_dim, ff_dim, num_heads, relative_attention_bias, dtype, device, operations): super().__init__() self.SelfAttention = T5Attention(model_dim, inner_dim, num_heads, relative_attention_bias, dtype, device, operations) self.layer_norm = T5LayerNorm(model_dim, dtype=dtype, device=device, operations=operations) def forward(self, x, mask=None, past_bias=None, optimized_attention=None): normed_hidden_states = self.layer_norm(x) output, past_bias = self.SelfAttention(self.layer_norm(x), mask=mask, past_bias=past_bias, optimized_attention=optimized_attention) x += output return x, past_bias class T5Block(torch.nn.Module): def __init__(self, model_dim, inner_dim, ff_dim, ff_activation, num_heads, relative_attention_bias, dtype, device, operations): super().__init__() self.layer = torch.nn.ModuleList() self.layer.append(T5LayerSelfAttention(model_dim, inner_dim, ff_dim, num_heads, relative_attention_bias, dtype, device, operations)) self.layer.append(T5LayerFF(model_dim, ff_dim, ff_activation, dtype, device, operations)) def forward(self, x, mask=None, past_bias=None, optimized_attention=None): x, past_bias = self.layer[0](x, mask, past_bias, optimized_attention) x = 
self.layer[-1](x) return x, past_bias class T5Stack(torch.nn.Module): def __init__(self, num_layers, model_dim, inner_dim, ff_dim, ff_activation, num_heads, dtype, device, operations): super().__init__() self.block = torch.nn.ModuleList( [T5Block(model_dim, inner_dim, ff_dim, ff_activation, num_heads, relative_attention_bias=(i == 0), dtype=dtype, device=device, operations=operations) for i in range(num_layers)] ) self.final_layer_norm = T5LayerNorm(model_dim, dtype=dtype, device=device, operations=operations) def forward(self, x, attention_mask=None, intermediate_output=None, final_layer_norm_intermediate=True): mask = None if attention_mask is not None: mask = 1.0 - attention_mask.to(x.dtype).reshape((attention_mask.shape[0], 1, -1, attention_mask.shape[-1])).expand(attention_mask.shape[0], 1, attention_mask.shape[-1], attention_mask.shape[-1]) mask = mask.masked_fill(mask.to(torch.bool), float("-inf")) intermediate = None optimized_attention = optimized_attention_for_device(x.device, mask=attention_mask is not None, small_input=True) past_bias = None for i, l in enumerate(self.block): x, past_bias = l(x, mask, past_bias, optimized_attention) if i == intermediate_output: intermediate = x.clone() x = self.final_layer_norm(x) if intermediate is not None and final_layer_norm_intermediate: intermediate = self.final_layer_norm(intermediate) return x, intermediate class T5(torch.nn.Module): def __init__(self, config_dict, dtype, device, operations): super().__init__() self.num_layers = config_dict["num_layers"] model_dim = config_dict["d_model"] self.encoder = T5Stack(self.num_layers, model_dim, model_dim, config_dict["d_ff"], config_dict["dense_act_fn"], config_dict["num_heads"], dtype, device, operations) self.dtype = dtype self.shared = torch.nn.Embedding(config_dict["vocab_size"], model_dim, device=device) def get_input_embeddings(self): return self.shared def set_input_embeddings(self, embeddings): self.shared = embeddings def forward(self, input_ids, *args, **kwargs): x = self.shared(input_ids) return self.encoder(x, *args, **kwargs)
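A usage sketch for the encoder defined above, assuming comfy.ops.disable_weight_init as the operations provider (it supplies the Linear class the layers expect); the config values mirror the base T5 config that follows. Weights here are untrained, so the outputs are only shape-correct.

import comfy.ops

config = {"num_layers": 12, "d_model": 768, "d_ff": 3072,
          "dense_act_fn": "relu", "num_heads": 12, "vocab_size": 32128}
encoder = T5(config, dtype=torch.float32, device="cpu",
             operations=comfy.ops.disable_weight_init)
tokens = torch.randint(0, config["vocab_size"], (1, 77))
hidden_states, intermediate = encoder(tokens)  # shape (1, 77, 768), intermediate is None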
{ "d_ff": 3072, "d_kv": 64, "d_model": 768, "decoder_start_token_id": 0, "dropout_rate": 0.1, "eos_token_id": 1, "dense_act_fn": "relu", "initializer_factor": 1.0, "is_encoder_decoder": true, "layer_norm_epsilon": 1e-06, "model_type": "t5", "num_decoder_layers": 12, "num_heads": 12, "num_layers": 12, "output_past": true, "pad_token_id": 0, "relative_attention_num_buckets": 32, "tie_word_embeddings": false, "vocab_size": 32128 }
{ "d_ff": 10240, "d_kv": 64, "d_model": 4096, "decoder_start_token_id": 0, "dropout_rate": 0.1, "eos_token_id": 1, "dense_act_fn": "gelu_pytorch_tanh", "initializer_factor": 1.0, "is_encoder_decoder": true, "layer_norm_epsilon": 1e-06, "model_type": "t5", "num_decoder_layers": 24, "num_heads": 64, "num_layers": 24, "output_past": true, "pad_token_id": 0, "relative_attention_num_buckets": 32, "tie_word_embeddings": false, "vocab_size": 32128 }
import torch
from typing import Callable, Protocol, TypedDict, Optional, List


class UnetApplyFunction(Protocol):
    """Function signature protocol on comfy.model_base.BaseModel.apply_model"""

    def __call__(self, x: torch.Tensor, t: torch.Tensor, **kwargs) -> torch.Tensor:
        pass


class UnetApplyConds(TypedDict):
    """Optional conditions for unet apply function."""

    c_concat: Optional[torch.Tensor]
    c_crossattn: Optional[torch.Tensor]
    control: Optional[torch.Tensor]
    transformer_options: Optional[dict]


class UnetParams(TypedDict):
    input: torch.Tensor
    timestep: torch.Tensor
    c: UnetApplyConds
    cond_or_uncond: List[int]


UnetWrapperFunction = Callable[[UnetApplyFunction, UnetParams], torch.Tensor]
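A minimal sketch of a function satisfying UnetWrapperFunction above: it simply unpacks UnetParams and forwards to the wrapped apply function.

def passthrough_wrapper(apply_model: UnetApplyFunction, params: UnetParams) -> torch.Tensor:
    # A real wrapper could edit params["c"] or params["input"] before the call.
    return apply_model(params["input"], params["timestep"], **params["c"])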
import torch import math import struct import comfy.checkpoint_pickle import safetensors.torch import numpy as np from PIL import Image import logging import itertools def load_torch_file(ckpt, safe_load=False, device=None): if device is None: device = torch.device("cpu") if ckpt.lower().endswith(".safetensors"): sd = safetensors.torch.load_file(ckpt, device=device.type) else: if safe_load: if not 'weights_only' in torch.load.__code__.co_varnames: logging.warning("Warning torch.load doesn't support weights_only on this pytorch version, loading unsafely.") safe_load = False if safe_load: pl_sd = torch.load(ckpt, map_location=device, weights_only=True) else: pl_sd = torch.load(ckpt, map_location=device, pickle_module=comfy.checkpoint_pickle) if "global_step" in pl_sd: logging.debug(f"Global Step: {pl_sd['global_step']}") if "state_dict" in pl_sd: sd = pl_sd["state_dict"] else: sd = pl_sd return sd def save_torch_file(sd, ckpt, metadata=None): if metadata is not None: safetensors.torch.save_file(sd, ckpt, metadata=metadata) else: safetensors.torch.save_file(sd, ckpt) def calculate_parameters(sd, prefix=""): params = 0 for k in sd.keys(): if k.startswith(prefix): params += sd[k].nelement() return params def state_dict_key_replace(state_dict, keys_to_replace): for x in keys_to_replace: if x in state_dict: state_dict[keys_to_replace[x]] = state_dict.pop(x) return state_dict def state_dict_prefix_replace(state_dict, replace_prefix, filter_keys=False): if filter_keys: out = {} else: out = state_dict for rp in replace_prefix: replace = list(map(lambda a: (a, "{}{}".format(replace_prefix[rp], a[len(rp):])), filter(lambda a: a.startswith(rp), state_dict.keys()))) for x in replace: w = state_dict.pop(x[0]) out[x[1]] = w return out def transformers_convert(sd, prefix_from, prefix_to, number): keys_to_replace = { "{}positional_embedding": "{}embeddings.position_embedding.weight", "{}token_embedding.weight": "{}embeddings.token_embedding.weight", "{}ln_final.weight": "{}final_layer_norm.weight", "{}ln_final.bias": "{}final_layer_norm.bias", } for k in keys_to_replace: x = k.format(prefix_from) if x in sd: sd[keys_to_replace[k].format(prefix_to)] = sd.pop(x) resblock_to_replace = { "ln_1": "layer_norm1", "ln_2": "layer_norm2", "mlp.c_fc": "mlp.fc1", "mlp.c_proj": "mlp.fc2", "attn.out_proj": "self_attn.out_proj", } for resblock in range(number): for x in resblock_to_replace: for y in ["weight", "bias"]: k = "{}transformer.resblocks.{}.{}.{}".format(prefix_from, resblock, x, y) k_to = "{}encoder.layers.{}.{}.{}".format(prefix_to, resblock, resblock_to_replace[x], y) if k in sd: sd[k_to] = sd.pop(k) for y in ["weight", "bias"]: k_from = "{}transformer.resblocks.{}.attn.in_proj_{}".format(prefix_from, resblock, y) if k_from in sd: weights = sd.pop(k_from) shape_from = weights.shape[0] for x in range(3): p = ["self_attn.q_proj", "self_attn.k_proj", "self_attn.v_proj"] k_to = "{}encoder.layers.{}.{}.{}".format(prefix_to, resblock, p[x], y) sd[k_to] = weights[shape_from*x:shape_from*(x + 1)] return sd def clip_text_transformers_convert(sd, prefix_from, prefix_to): sd = transformers_convert(sd, prefix_from, "{}text_model.".format(prefix_to), 32) tp = "{}text_projection.weight".format(prefix_from) if tp in sd: sd["{}text_projection.weight".format(prefix_to)] = sd.pop(tp) tp = "{}text_projection".format(prefix_from) if tp in sd: sd["{}text_projection.weight".format(prefix_to)] = sd.pop(tp).transpose(0, 1).contiguous() return sd UNET_MAP_ATTENTIONS = { "proj_in.weight", "proj_in.bias", "proj_out.weight", 
"proj_out.bias", "norm.weight", "norm.bias", } TRANSFORMER_BLOCKS = { "norm1.weight", "norm1.bias", "norm2.weight", "norm2.bias", "norm3.weight", "norm3.bias", "attn1.to_q.weight", "attn1.to_k.weight", "attn1.to_v.weight", "attn1.to_out.0.weight", "attn1.to_out.0.bias", "attn2.to_q.weight", "attn2.to_k.weight", "attn2.to_v.weight", "attn2.to_out.0.weight", "attn2.to_out.0.bias", "ff.net.0.proj.weight", "ff.net.0.proj.bias", "ff.net.2.weight", "ff.net.2.bias", } UNET_MAP_RESNET = { "in_layers.2.weight": "conv1.weight", "in_layers.2.bias": "conv1.bias", "emb_layers.1.weight": "time_emb_proj.weight", "emb_layers.1.bias": "time_emb_proj.bias", "out_layers.3.weight": "conv2.weight", "out_layers.3.bias": "conv2.bias", "skip_connection.weight": "conv_shortcut.weight", "skip_connection.bias": "conv_shortcut.bias", "in_layers.0.weight": "norm1.weight", "in_layers.0.bias": "norm1.bias", "out_layers.0.weight": "norm2.weight", "out_layers.0.bias": "norm2.bias", } UNET_MAP_BASIC = { ("label_emb.0.0.weight", "class_embedding.linear_1.weight"), ("label_emb.0.0.bias", "class_embedding.linear_1.bias"), ("label_emb.0.2.weight", "class_embedding.linear_2.weight"), ("label_emb.0.2.bias", "class_embedding.linear_2.bias"), ("label_emb.0.0.weight", "add_embedding.linear_1.weight"), ("label_emb.0.0.bias", "add_embedding.linear_1.bias"), ("label_emb.0.2.weight", "add_embedding.linear_2.weight"), ("label_emb.0.2.bias", "add_embedding.linear_2.bias"), ("input_blocks.0.0.weight", "conv_in.weight"), ("input_blocks.0.0.bias", "conv_in.bias"), ("out.0.weight", "conv_norm_out.weight"), ("out.0.bias", "conv_norm_out.bias"), ("out.2.weight", "conv_out.weight"), ("out.2.bias", "conv_out.bias"), ("time_embed.0.weight", "time_embedding.linear_1.weight"), ("time_embed.0.bias", "time_embedding.linear_1.bias"), ("time_embed.2.weight", "time_embedding.linear_2.weight"), ("time_embed.2.bias", "time_embedding.linear_2.bias") } def unet_to_diffusers(unet_config): if "num_res_blocks" not in unet_config: return {} num_res_blocks = unet_config["num_res_blocks"] channel_mult = unet_config["channel_mult"] transformer_depth = unet_config["transformer_depth"][:] transformer_depth_output = unet_config["transformer_depth_output"][:] num_blocks = len(channel_mult) transformers_mid = unet_config.get("transformer_depth_middle", None) diffusers_unet_map = {} for x in range(num_blocks): n = 1 + (num_res_blocks[x] + 1) * x for i in range(num_res_blocks[x]): for b in UNET_MAP_RESNET: diffusers_unet_map["down_blocks.{}.resnets.{}.{}".format(x, i, UNET_MAP_RESNET[b])] = "input_blocks.{}.0.{}".format(n, b) num_transformers = transformer_depth.pop(0) if num_transformers > 0: for b in UNET_MAP_ATTENTIONS: diffusers_unet_map["down_blocks.{}.attentions.{}.{}".format(x, i, b)] = "input_blocks.{}.1.{}".format(n, b) for t in range(num_transformers): for b in TRANSFORMER_BLOCKS: diffusers_unet_map["down_blocks.{}.attentions.{}.transformer_blocks.{}.{}".format(x, i, t, b)] = "input_blocks.{}.1.transformer_blocks.{}.{}".format(n, t, b) n += 1 for k in ["weight", "bias"]: diffusers_unet_map["down_blocks.{}.downsamplers.0.conv.{}".format(x, k)] = "input_blocks.{}.0.op.{}".format(n, k) i = 0 for b in UNET_MAP_ATTENTIONS: diffusers_unet_map["mid_block.attentions.{}.{}".format(i, b)] = "middle_block.1.{}".format(b) for t in range(transformers_mid): for b in TRANSFORMER_BLOCKS: diffusers_unet_map["mid_block.attentions.{}.transformer_blocks.{}.{}".format(i, t, b)] = "middle_block.1.transformer_blocks.{}.{}".format(t, b) for i, n in enumerate([0, 2]): for b in 
UNET_MAP_RESNET: diffusers_unet_map["mid_block.resnets.{}.{}".format(i, UNET_MAP_RESNET[b])] = "middle_block.{}.{}".format(n, b) num_res_blocks = list(reversed(num_res_blocks)) for x in range(num_blocks): n = (num_res_blocks[x] + 1) * x l = num_res_blocks[x] + 1 for i in range(l): c = 0 for b in UNET_MAP_RESNET: diffusers_unet_map["up_blocks.{}.resnets.{}.{}".format(x, i, UNET_MAP_RESNET[b])] = "output_blocks.{}.0.{}".format(n, b) c += 1 num_transformers = transformer_depth_output.pop() if num_transformers > 0: c += 1 for b in UNET_MAP_ATTENTIONS: diffusers_unet_map["up_blocks.{}.attentions.{}.{}".format(x, i, b)] = "output_blocks.{}.1.{}".format(n, b) for t in range(num_transformers): for b in TRANSFORMER_BLOCKS: diffusers_unet_map["up_blocks.{}.attentions.{}.transformer_blocks.{}.{}".format(x, i, t, b)] = "output_blocks.{}.1.transformer_blocks.{}.{}".format(n, t, b) if i == l - 1: for k in ["weight", "bias"]: diffusers_unet_map["up_blocks.{}.upsamplers.0.conv.{}".format(x, k)] = "output_blocks.{}.{}.conv.{}".format(n, c, k) n += 1 for k in UNET_MAP_BASIC: diffusers_unet_map[k[1]] = k[0] return diffusers_unet_map def swap_scale_shift(weight): shift, scale = weight.chunk(2, dim=0) new_weight = torch.cat([scale, shift], dim=0) return new_weight MMDIT_MAP_BASIC = { ("context_embedder.bias", "context_embedder.bias"), ("context_embedder.weight", "context_embedder.weight"), ("t_embedder.mlp.0.bias", "time_text_embed.timestep_embedder.linear_1.bias"), ("t_embedder.mlp.0.weight", "time_text_embed.timestep_embedder.linear_1.weight"), ("t_embedder.mlp.2.bias", "time_text_embed.timestep_embedder.linear_2.bias"), ("t_embedder.mlp.2.weight", "time_text_embed.timestep_embedder.linear_2.weight"), ("x_embedder.proj.bias", "pos_embed.proj.bias"), ("x_embedder.proj.weight", "pos_embed.proj.weight"), ("y_embedder.mlp.0.bias", "time_text_embed.text_embedder.linear_1.bias"), ("y_embedder.mlp.0.weight", "time_text_embed.text_embedder.linear_1.weight"), ("y_embedder.mlp.2.bias", "time_text_embed.text_embedder.linear_2.bias"), ("y_embedder.mlp.2.weight", "time_text_embed.text_embedder.linear_2.weight"), ("pos_embed", "pos_embed.pos_embed"), ("final_layer.adaLN_modulation.1.bias", "norm_out.linear.bias", swap_scale_shift), ("final_layer.adaLN_modulation.1.weight", "norm_out.linear.weight", swap_scale_shift), ("final_layer.linear.bias", "proj_out.bias"), ("final_layer.linear.weight", "proj_out.weight"), } MMDIT_MAP_BLOCK = { ("context_block.adaLN_modulation.1.bias", "norm1_context.linear.bias"), ("context_block.adaLN_modulation.1.weight", "norm1_context.linear.weight"), ("context_block.attn.proj.bias", "attn.to_add_out.bias"), ("context_block.attn.proj.weight", "attn.to_add_out.weight"), ("context_block.mlp.fc1.bias", "ff_context.net.0.proj.bias"), ("context_block.mlp.fc1.weight", "ff_context.net.0.proj.weight"), ("context_block.mlp.fc2.bias", "ff_context.net.2.bias"), ("context_block.mlp.fc2.weight", "ff_context.net.2.weight"), ("x_block.adaLN_modulation.1.bias", "norm1.linear.bias"), ("x_block.adaLN_modulation.1.weight", "norm1.linear.weight"), ("x_block.attn.proj.bias", "attn.to_out.0.bias"), ("x_block.attn.proj.weight", "attn.to_out.0.weight"), ("x_block.mlp.fc1.bias", "ff.net.0.proj.bias"), ("x_block.mlp.fc1.weight", "ff.net.0.proj.weight"), ("x_block.mlp.fc2.bias", "ff.net.2.bias"), ("x_block.mlp.fc2.weight", "ff.net.2.weight"), } def mmdit_to_diffusers(mmdit_config, output_prefix=""): key_map = {} depth = mmdit_config.get("depth", 0) for i in range(depth): block_from = "transformer_blocks.{}".format(i) 
block_to = "{}joint_blocks.{}".format(output_prefix, i) offset = depth * 64 for end in ("weight", "bias"): k = "{}.attn.".format(block_from) qkv = "{}.x_block.attn.qkv.{}".format(block_to, end) key_map["{}to_q.{}".format(k, end)] = (qkv, (0, 0, offset)) key_map["{}to_k.{}".format(k, end)] = (qkv, (0, offset, offset)) key_map["{}to_v.{}".format(k, end)] = (qkv, (0, offset * 2, offset)) qkv = "{}.context_block.attn.qkv.{}".format(block_to, end) key_map["{}add_q_proj.{}".format(k, end)] = (qkv, (0, 0, offset)) key_map["{}add_k_proj.{}".format(k, end)] = (qkv, (0, offset, offset)) key_map["{}add_v_proj.{}".format(k, end)] = (qkv, (0, offset * 2, offset)) for k in MMDIT_MAP_BLOCK: key_map["{}.{}".format(block_from, k[1])] = "{}.{}".format(block_to, k[0]) map_basic = MMDIT_MAP_BASIC.copy() map_basic.add(("joint_blocks.{}.context_block.adaLN_modulation.1.bias".format(depth - 1), "transformer_blocks.{}.norm1_context.linear.bias".format(depth - 1), swap_scale_shift)) map_basic.add(("joint_blocks.{}.context_block.adaLN_modulation.1.weight".format(depth - 1), "transformer_blocks.{}.norm1_context.linear.weight".format(depth - 1), swap_scale_shift)) for k in map_basic: if len(k) > 2: key_map[k[1]] = ("{}{}".format(output_prefix, k[0]), None, k[2]) else: key_map[k[1]] = "{}{}".format(output_prefix, k[0]) return key_map def repeat_to_batch_size(tensor, batch_size, dim=0): if tensor.shape[dim] > batch_size: return tensor.narrow(dim, 0, batch_size) elif tensor.shape[dim] < batch_size: return tensor.repeat(dim * [1] + [math.ceil(batch_size / tensor.shape[dim])] + [1] * (len(tensor.shape) - 1 - dim)).narrow(dim, 0, batch_size) return tensor def resize_to_batch_size(tensor, batch_size): in_batch_size = tensor.shape[0] if in_batch_size == batch_size: return tensor if batch_size <= 1: return tensor[:batch_size] output = torch.empty([batch_size] + list(tensor.shape)[1:], dtype=tensor.dtype, device=tensor.device) if batch_size < in_batch_size: scale = (in_batch_size - 1) / (batch_size - 1) for i in range(batch_size): output[i] = tensor[min(round(i * scale), in_batch_size - 1)] else: scale = in_batch_size / batch_size for i in range(batch_size): output[i] = tensor[min(math.floor((i + 0.5) * scale), in_batch_size - 1)] return output def convert_sd_to(state_dict, dtype): keys = list(state_dict.keys()) for k in keys: state_dict[k] = state_dict[k].to(dtype) return state_dict def safetensors_header(safetensors_path, max_size=100*1024*1024): with open(safetensors_path, "rb") as f: header = f.read(8) length_of_header = struct.unpack('<Q', header)[0] if length_of_header > max_size: return None return f.read(length_of_header) def set_attr(obj, attr, value): attrs = attr.split(".") for name in attrs[:-1]: obj = getattr(obj, name) prev = getattr(obj, attrs[-1]) setattr(obj, attrs[-1], value) return prev def set_attr_param(obj, attr, value): return set_attr(obj, attr, torch.nn.Parameter(value, requires_grad=False)) def copy_to_param(obj, attr, value): attrs = attr.split(".") for name in attrs[:-1]: obj = getattr(obj, name) prev = getattr(obj, attrs[-1]) prev.data.copy_(value) def get_attr(obj, attr): attrs = attr.split(".") for name in attrs: obj = getattr(obj, name) return obj def bislerp(samples, width, height): def slerp(b1, b2, r): '''slerps batches b1, b2 according to ratio r, batches should be flat e.g. 
NxC''' c = b1.shape[-1] b1_norms = torch.norm(b1, dim=-1, keepdim=True) b2_norms = torch.norm(b2, dim=-1, keepdim=True) b1_normalized = b1 / b1_norms b2_normalized = b2 / b2_norms b1_normalized[b1_norms.expand(-1,c) == 0.0] = 0.0 b2_normalized[b2_norms.expand(-1,c) == 0.0] = 0.0 dot = (b1_normalized*b2_normalized).sum(1) omega = torch.acos(dot) so = torch.sin(omega) res = (torch.sin((1.0-r.squeeze(1))*omega)/so).unsqueeze(1)*b1_normalized + (torch.sin(r.squeeze(1)*omega)/so).unsqueeze(1) * b2_normalized res *= (b1_norms * (1.0-r) + b2_norms * r).expand(-1,c) res[dot > 1 - 1e-5] = b1[dot > 1 - 1e-5] res[dot < 1e-5 - 1] = (b1 * (1.0-r) + b2 * r)[dot < 1e-5 - 1] return res def generate_bilinear_data(length_old, length_new, device): coords_1 = torch.arange(length_old, dtype=torch.float32, device=device).reshape((1,1,1,-1)) coords_1 = torch.nn.functional.interpolate(coords_1, size=(1, length_new), mode="bilinear") ratios = coords_1 - coords_1.floor() coords_1 = coords_1.to(torch.int64) coords_2 = torch.arange(length_old, dtype=torch.float32, device=device).reshape((1,1,1,-1)) + 1 coords_2[:,:,:,-1] -= 1 coords_2 = torch.nn.functional.interpolate(coords_2, size=(1, length_new), mode="bilinear") coords_2 = coords_2.to(torch.int64) return ratios, coords_1, coords_2 orig_dtype = samples.dtype samples = samples.float() n,c,h,w = samples.shape h_new, w_new = (height, width) ratios, coords_1, coords_2 = generate_bilinear_data(w, w_new, samples.device) coords_1 = coords_1.expand((n, c, h, -1)) coords_2 = coords_2.expand((n, c, h, -1)) ratios = ratios.expand((n, 1, h, -1)) pass_1 = samples.gather(-1,coords_1).movedim(1, -1).reshape((-1,c)) pass_2 = samples.gather(-1,coords_2).movedim(1, -1).reshape((-1,c)) ratios = ratios.movedim(1, -1).reshape((-1,1)) result = slerp(pass_1, pass_2, ratios) result = result.reshape(n, h, w_new, c).movedim(-1, 1) ratios, coords_1, coords_2 = generate_bilinear_data(h, h_new, samples.device) coords_1 = coords_1.reshape((1,1,-1,1)).expand((n, c, -1, w_new)) coords_2 = coords_2.reshape((1,1,-1,1)).expand((n, c, -1, w_new)) ratios = ratios.reshape((1,1,-1,1)).expand((n, 1, -1, w_new)) pass_1 = result.gather(-2,coords_1).movedim(1, -1).reshape((-1,c)) pass_2 = result.gather(-2,coords_2).movedim(1, -1).reshape((-1,c)) ratios = ratios.movedim(1, -1).reshape((-1,1)) result = slerp(pass_1, pass_2, ratios) result = result.reshape(n, h_new, w_new, c).movedim(-1, 1) return result.to(orig_dtype) def lanczos(samples, width, height): images = [Image.fromarray(np.clip(255. 
* image.movedim(0, -1).cpu().numpy(), 0, 255).astype(np.uint8)) for image in samples] images = [image.resize((width, height), resample=Image.Resampling.LANCZOS) for image in images] images = [torch.from_numpy(np.array(image).astype(np.float32) / 255.0).movedim(-1, 0) for image in images] result = torch.stack(images) return result.to(samples.device, samples.dtype) def common_upscale(samples, width, height, upscale_method, crop): if crop == "center": old_width = samples.shape[3] old_height = samples.shape[2] old_aspect = old_width / old_height new_aspect = width / height x = 0 y = 0 if old_aspect > new_aspect: x = round((old_width - old_width * (new_aspect / old_aspect)) / 2) elif old_aspect < new_aspect: y = round((old_height - old_height * (old_aspect / new_aspect)) / 2) s = samples[:,:,y:old_height-y,x:old_width-x] else: s = samples if upscale_method == "bislerp": return bislerp(s, width, height) elif upscale_method == "lanczos": return lanczos(s, width, height) else: return torch.nn.functional.interpolate(s, size=(height, width), mode=upscale_method) def get_tiled_scale_steps(width, height, tile_x, tile_y, overlap): return math.ceil((height / (tile_y - overlap))) * math.ceil((width / (tile_x - overlap))) @torch.inference_mode() def tiled_scale_multidim(samples, function, tile=(64, 64), overlap = 8, upscale_amount = 4, out_channels = 3, output_device="cpu", pbar = None): dims = len(tile) output = torch.empty([samples.shape[0], out_channels] + list(map(lambda a: round(a * upscale_amount), samples.shape[2:])), device=output_device) for b in range(samples.shape[0]): s = samples[b:b+1] out = torch.zeros([s.shape[0], out_channels] + list(map(lambda a: round(a * upscale_amount), s.shape[2:])), device=output_device) out_div = torch.zeros([s.shape[0], out_channels] + list(map(lambda a: round(a * upscale_amount), s.shape[2:])), device=output_device) for it in itertools.product(*map(lambda a: range(0, a[0], a[1] - overlap), zip(s.shape[2:], tile))): s_in = s upscaled = [] for d in range(dims): pos = max(0, min(s.shape[d + 2] - overlap, it[d])) l = min(tile[d], s.shape[d + 2] - pos) s_in = s_in.narrow(d + 2, pos, l) upscaled.append(round(pos * upscale_amount)) ps = function(s_in).to(output_device) mask = torch.ones_like(ps) feather = round(overlap * upscale_amount) for t in range(feather): for d in range(2, dims + 2): m = mask.narrow(d, t, 1) m *= ((1.0/feather) * (t + 1)) m = mask.narrow(d, mask.shape[d] -1 -t, 1) m *= ((1.0/feather) * (t + 1)) o = out o_d = out_div for d in range(dims): o = o.narrow(d + 2, upscaled[d], mask.shape[d + 2]) o_d = o_d.narrow(d + 2, upscaled[d], mask.shape[d + 2]) o += ps * mask o_d += mask if pbar is not None: pbar.update(1) output[b:b+1] = out/out_div return output def tiled_scale(samples, function, tile_x=64, tile_y=64, overlap = 8, upscale_amount = 4, out_channels = 3, output_device="cpu", pbar = None): return tiled_scale_multidim(samples, function, (tile_y, tile_x), overlap, upscale_amount, out_channels, output_device, pbar) PROGRESS_BAR_ENABLED = True def set_progress_bar_enabled(enabled): global PROGRESS_BAR_ENABLED PROGRESS_BAR_ENABLED = enabled PROGRESS_BAR_HOOK = None def set_progress_bar_global_hook(function): global PROGRESS_BAR_HOOK PROGRESS_BAR_HOOK = function class ProgressBar: def __init__(self, total): global PROGRESS_BAR_HOOK self.total = total self.current = 0 self.hook = PROGRESS_BAR_HOOK def update_absolute(self, value, total=None, preview=None): if total is not None: self.total = total if value > self.total: value = self.total self.current = value 
if self.hook is not None: self.hook(self.current, self.total, preview) def update(self, value): self.update_absolute(self.current + value)
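A small usage sketch of the progress-bar hook pair defined above; the hook receives the running count, the total, and an optional preview image.

def log_hook(current, total, preview):
    print(f"progress: {current}/{total}")

set_progress_bar_global_hook(log_hook)

pbar = ProgressBar(get_tiled_scale_steps(512, 512, tile_x=64, tile_y=64, overlap=8))
for _ in range(pbar.total):
    pbar.update(1)   # calls log_hook after each tile-sized step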
import torch import torch as th import torch.nn as nn from ..ldm.modules.diffusionmodules.util import ( zero_module, timestep_embedding, ) from ..ldm.modules.attention import SpatialTransformer from ..ldm.modules.diffusionmodules.openaimodel import UNetModel, TimestepEmbedSequential, ResBlock, Downsample from ..ldm.util import exists import comfy.ops class ControlledUnetModel(UNetModel): pass class ControlNet(nn.Module): def __init__( self, image_size, in_channels, model_channels, hint_channels, num_res_blocks, dropout=0, channel_mult=(1, 2, 4, 8), conv_resample=True, dims=2, num_classes=None, use_checkpoint=False, dtype=torch.float32, num_heads=-1, num_head_channels=-1, num_heads_upsample=-1, use_scale_shift_norm=False, resblock_updown=False, use_new_attention_order=False, use_spatial_transformer=False, transformer_depth=1, context_dim=None, n_embed=None, legacy=True, disable_self_attentions=None, num_attention_blocks=None, disable_middle_self_attn=False, use_linear_in_transformer=False, adm_in_channels=None, transformer_depth_middle=None, transformer_depth_output=None, attn_precision=None, device=None, operations=comfy.ops.disable_weight_init, **kwargs, ): super().__init__() assert use_spatial_transformer == True, "use_spatial_transformer has to be true" if use_spatial_transformer: assert context_dim is not None, 'Fool!! You forgot to include the dimension of your cross-attention conditioning...' if context_dim is not None: assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...' if num_heads_upsample == -1: num_heads_upsample = num_heads if num_heads == -1: assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set' if num_head_channels == -1: assert num_heads != -1, 'Either num_heads or num_head_channels has to be set' self.dims = dims self.image_size = image_size self.in_channels = in_channels self.model_channels = model_channels if isinstance(num_res_blocks, int): self.num_res_blocks = len(channel_mult) * [num_res_blocks] else: if len(num_res_blocks) != len(channel_mult): raise ValueError("provide num_res_blocks either as an int (globally constant) or " "as a list/tuple (per-level) with the same length as channel_mult") self.num_res_blocks = num_res_blocks if disable_self_attentions is not None: assert len(disable_self_attentions) == len(channel_mult) if num_attention_blocks is not None: assert len(num_attention_blocks) == len(self.num_res_blocks) assert all(map(lambda i: self.num_res_blocks[i] >= num_attention_blocks[i], range(len(num_attention_blocks)))) transformer_depth = transformer_depth[:] self.dropout = dropout self.channel_mult = channel_mult self.conv_resample = conv_resample self.num_classes = num_classes self.use_checkpoint = use_checkpoint self.dtype = dtype self.num_heads = num_heads self.num_head_channels = num_head_channels self.num_heads_upsample = num_heads_upsample self.predict_codebook_ids = n_embed is not None time_embed_dim = model_channels * 4 self.time_embed = nn.Sequential( operations.Linear(model_channels, time_embed_dim, dtype=self.dtype, device=device), nn.SiLU(), operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), ) if self.num_classes is not None: if isinstance(self.num_classes, int): self.label_emb = nn.Embedding(num_classes, time_embed_dim) elif self.num_classes == "continuous": print("setting up linear c_adm embedding layer") self.label_emb = nn.Linear(1, time_embed_dim) elif self.num_classes == "sequential": assert 
adm_in_channels is not None self.label_emb = nn.Sequential( nn.Sequential( operations.Linear(adm_in_channels, time_embed_dim, dtype=self.dtype, device=device), nn.SiLU(), operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), ) ) else: raise ValueError() self.input_blocks = nn.ModuleList( [ TimestepEmbedSequential( operations.conv_nd(dims, in_channels, model_channels, 3, padding=1, dtype=self.dtype, device=device) ) ] ) self.zero_convs = nn.ModuleList([self.make_zero_conv(model_channels, operations=operations, dtype=self.dtype, device=device)]) self.input_hint_block = TimestepEmbedSequential( operations.conv_nd(dims, hint_channels, 16, 3, padding=1, dtype=self.dtype, device=device), nn.SiLU(), operations.conv_nd(dims, 16, 16, 3, padding=1, dtype=self.dtype, device=device), nn.SiLU(), operations.conv_nd(dims, 16, 32, 3, padding=1, stride=2, dtype=self.dtype, device=device), nn.SiLU(), operations.conv_nd(dims, 32, 32, 3, padding=1, dtype=self.dtype, device=device), nn.SiLU(), operations.conv_nd(dims, 32, 96, 3, padding=1, stride=2, dtype=self.dtype, device=device), nn.SiLU(), operations.conv_nd(dims, 96, 96, 3, padding=1, dtype=self.dtype, device=device), nn.SiLU(), operations.conv_nd(dims, 96, 256, 3, padding=1, stride=2, dtype=self.dtype, device=device), nn.SiLU(), operations.conv_nd(dims, 256, model_channels, 3, padding=1, dtype=self.dtype, device=device) ) self._feature_size = model_channels input_block_chans = [model_channels] ch = model_channels ds = 1 for level, mult in enumerate(channel_mult): for nr in range(self.num_res_blocks[level]): layers = [ ResBlock( ch, time_embed_dim, dropout, out_channels=mult * model_channels, dims=dims, use_checkpoint=use_checkpoint, use_scale_shift_norm=use_scale_shift_norm, dtype=self.dtype, device=device, operations=operations, ) ] ch = mult * model_channels num_transformers = transformer_depth.pop(0) if num_transformers > 0: if num_head_channels == -1: dim_head = ch else: num_heads = ch dim_head = num_head_channels if legacy: dim_head = ch if exists(disable_self_attentions): disabled_sa = disable_self_attentions[level] else: disabled_sa = False if not exists(num_attention_blocks) or nr < num_attention_blocks[level]: layers.append( SpatialTransformer( ch, num_heads, dim_head, depth=num_transformers, context_dim=context_dim, disable_self_attn=disabled_sa, use_linear=use_linear_in_transformer, use_checkpoint=use_checkpoint, attn_precision=attn_precision, dtype=self.dtype, device=device, operations=operations ) ) self.input_blocks.append(TimestepEmbedSequential(*layers)) self.zero_convs.append(self.make_zero_conv(ch, operations=operations, dtype=self.dtype, device=device)) self._feature_size += ch input_block_chans.append(ch) if level != len(channel_mult) - 1: out_ch = ch self.input_blocks.append( TimestepEmbedSequential( ResBlock( ch, time_embed_dim, dropout, out_channels=out_ch, dims=dims, use_checkpoint=use_checkpoint, use_scale_shift_norm=use_scale_shift_norm, down=True, dtype=self.dtype, device=device, operations=operations ) if resblock_updown else Downsample( ch, conv_resample, dims=dims, out_channels=out_ch, dtype=self.dtype, device=device, operations=operations ) ) ) ch = out_ch input_block_chans.append(ch) self.zero_convs.append(self.make_zero_conv(ch, operations=operations, dtype=self.dtype, device=device)) ds *= 2 self._feature_size += ch if num_head_channels == -1: dim_head = ch else: num_heads = ch dim_head = num_head_channels if legacy: dim_head = ch mid_block = [ ResBlock( ch, time_embed_dim, dropout, 
dims=dims, use_checkpoint=use_checkpoint, use_scale_shift_norm=use_scale_shift_norm, dtype=self.dtype, device=device, operations=operations )] if transformer_depth_middle >= 0: mid_block += [SpatialTransformer( ch, num_heads, dim_head, depth=transformer_depth_middle, context_dim=context_dim, disable_self_attn=disable_middle_self_attn, use_linear=use_linear_in_transformer, use_checkpoint=use_checkpoint, attn_precision=attn_precision, dtype=self.dtype, device=device, operations=operations ), ResBlock( ch, time_embed_dim, dropout, dims=dims, use_checkpoint=use_checkpoint, use_scale_shift_norm=use_scale_shift_norm, dtype=self.dtype, device=device, operations=operations )] self.middle_block = TimestepEmbedSequential(*mid_block) self.middle_block_out = self.make_zero_conv(ch, operations=operations, dtype=self.dtype, device=device) self._feature_size += ch def make_zero_conv(self, channels, operations=None, dtype=None, device=None): return TimestepEmbedSequential(operations.conv_nd(self.dims, channels, channels, 1, padding=0, dtype=dtype, device=device)) def forward(self, x, hint, timesteps, context, y=None, **kwargs): t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False).to(x.dtype) emb = self.time_embed(t_emb) guided_hint = self.input_hint_block(hint, emb, context) outs = [] hs = [] if self.num_classes is not None: assert y.shape[0] == x.shape[0] emb = emb + self.label_emb(y) h = x for module, zero_conv in zip(self.input_blocks, self.zero_convs): if guided_hint is not None: h = module(h, emb, context) h += guided_hint guided_hint = None else: h = module(h, emb, context) outs.append(zero_conv(h, emb, context)) h = self.middle_block(h, emb, context) outs.append(self.middle_block_out(h, emb, context)) return outs
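A shape-level sketch of the forward contract above, assuming control_model is an already-constructed ControlNet with in_channels=4, hint_channels=3 and context_dim=768, and a hint image at 8x the latent resolution (the input_hint_block downsamples by 8). The call returns one residual per zero conv plus the middle-block output, which the caller adds onto the UNet's skip connections.

x = torch.randn(1, 4, 64, 64)          # noisy latent
hint = torch.randn(1, 3, 512, 512)     # conditioning image, 8x the latent size
timesteps = torch.tensor([999])
context = torch.randn(1, 77, 768)      # cross-attention conditioning

outs = control_model(x, hint, timesteps, context)
down_residuals, mid_residual = outs[:-1], outs[-1]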
import torch import torch.nn.functional as F import math from tqdm.auto import trange, tqdm class NoiseScheduleVP: def __init__( self, schedule='discrete', betas=None, alphas_cumprod=None, continuous_beta_0=0.1, continuous_beta_1=20., ): """Create a wrapper class for the forward SDE (VP type). *** Update: We support discrete-time diffusion models by implementing a picewise linear interpolation for log_alpha_t. We recommend to use schedule='discrete' for the discrete-time diffusion models, especially for high-resolution images. *** The forward SDE ensures that the condition distribution q_{t|0}(x_t | x_0) = N ( alpha_t * x_0, sigma_t^2 * I ). We further define lambda_t = log(alpha_t) - log(sigma_t), which is the half-logSNR (described in the DPM-Solver paper). Therefore, we implement the functions for computing alpha_t, sigma_t and lambda_t. For t in [0, T], we have: log_alpha_t = self.marginal_log_mean_coeff(t) sigma_t = self.marginal_std(t) lambda_t = self.marginal_lambda(t) Moreover, as lambda(t) is an invertible function, we also support its inverse function: t = self.inverse_lambda(lambda_t) =============================================================== We support both discrete-time DPMs (trained on n = 0, 1, ..., N-1) and continuous-time DPMs (trained on t in [t_0, T]). 1. For discrete-time DPMs: For discrete-time DPMs trained on n = 0, 1, ..., N-1, we convert the discrete steps to continuous time steps by: t_i = (i + 1) / N e.g. for N = 1000, we have t_0 = 1e-3 and T = t_{N-1} = 1. We solve the corresponding diffusion ODE from time T = 1 to time t_0 = 1e-3. Args: betas: A `torch.Tensor`. The beta array for the discrete-time DPM. (See the original DDPM paper for details) alphas_cumprod: A `torch.Tensor`. The cumprod alphas for the discrete-time DPM. (See the original DDPM paper for details) Note that we always have alphas_cumprod = cumprod(betas). Therefore, we only need to set one of `betas` and `alphas_cumprod`. **Important**: Please pay special attention for the args for `alphas_cumprod`: The `alphas_cumprod` is the \hat{alpha_n} arrays in the notations of DDPM. Specifically, DDPMs assume that q_{t_n | 0}(x_{t_n} | x_0) = N ( \sqrt{\hat{alpha_n}} * x_0, (1 - \hat{alpha_n}) * I ). Therefore, the notation \hat{alpha_n} is different from the notation alpha_t in DPM-Solver. In fact, we have alpha_{t_n} = \sqrt{\hat{alpha_n}}, and log(alpha_{t_n}) = 0.5 * log(\hat{alpha_n}). 2. For continuous-time DPMs: We support two types of VPSDEs: linear (DDPM) and cosine (improved-DDPM). The hyperparameters for the noise schedule are the default settings in DDPM and improved-DDPM: Args: beta_min: A `float` number. The smallest beta for the linear schedule. beta_max: A `float` number. The largest beta for the linear schedule. cosine_s: A `float` number. The hyperparameter in the cosine schedule. cosine_beta_max: A `float` number. The hyperparameter in the cosine schedule. T: A `float` number. The ending time of the forward process. =============================================================== Args: schedule: A `str`. The noise schedule of the forward SDE. 'discrete' for discrete-time DPMs, 'linear' or 'cosine' for continuous-time DPMs. Returns: A wrapper object of the forward SDE (VP type). =============================================================== Example: >>> ns = NoiseScheduleVP('discrete', betas=betas) >>> ns = NoiseScheduleVP('discrete', alphas_cumprod=alphas_cumprod) >>> ns = NoiseScheduleVP('linear', continuous_beta_0=0.1, continuous_beta_1=20.) 
""" if schedule not in ['discrete', 'linear', 'cosine']: raise ValueError("Unsupported noise schedule {}. The schedule needs to be 'discrete' or 'linear' or 'cosine'".format(schedule)) self.schedule = schedule if schedule == 'discrete': if betas is not None: log_alphas = 0.5 * torch.log(1 - betas).cumsum(dim=0) else: assert alphas_cumprod is not None log_alphas = 0.5 * torch.log(alphas_cumprod) self.total_N = len(log_alphas) self.T = 1. self.t_array = torch.linspace(0., 1., self.total_N + 1)[1:].reshape((1, -1)) self.log_alpha_array = log_alphas.reshape((1, -1,)) else: self.total_N = 1000 self.beta_0 = continuous_beta_0 self.beta_1 = continuous_beta_1 self.cosine_s = 0.008 self.cosine_beta_max = 999. self.cosine_t_max = math.atan(self.cosine_beta_max * (1. + self.cosine_s) / math.pi) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s self.cosine_log_alpha_0 = math.log(math.cos(self.cosine_s / (1. + self.cosine_s) * math.pi / 2.)) self.schedule = schedule if schedule == 'cosine': self.T = 0.9946 else: self.T = 1. def marginal_log_mean_coeff(self, t): """ Compute log(alpha_t) of a given continuous-time label t in [0, T]. """ if self.schedule == 'discrete': return interpolate_fn(t.reshape((-1, 1)), self.t_array.to(t.device), self.log_alpha_array.to(t.device)).reshape((-1)) elif self.schedule == 'linear': return -0.25 * t ** 2 * (self.beta_1 - self.beta_0) - 0.5 * t * self.beta_0 elif self.schedule == 'cosine': log_alpha_fn = lambda s: torch.log(torch.cos((s + self.cosine_s) / (1. + self.cosine_s) * math.pi / 2.)) log_alpha_t = log_alpha_fn(t) - self.cosine_log_alpha_0 return log_alpha_t def marginal_alpha(self, t): """ Compute alpha_t of a given continuous-time label t in [0, T]. """ return torch.exp(self.marginal_log_mean_coeff(t)) def marginal_std(self, t): """ Compute sigma_t of a given continuous-time label t in [0, T]. """ return torch.sqrt(1. - torch.exp(2. * self.marginal_log_mean_coeff(t))) def marginal_lambda(self, t): """ Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T]. """ log_mean_coeff = self.marginal_log_mean_coeff(t) log_std = 0.5 * torch.log(1. - torch.exp(2. * log_mean_coeff)) return log_mean_coeff - log_std def inverse_lambda(self, lamb): """ Compute the continuous-time label t in [0, T] of a given half-logSNR lambda_t. """ if self.schedule == 'linear': tmp = 2. * (self.beta_1 - self.beta_0) * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) Delta = self.beta_0**2 + tmp return tmp / (torch.sqrt(Delta) + self.beta_0) / (self.beta_1 - self.beta_0) elif self.schedule == 'discrete': log_alpha = -0.5 * torch.logaddexp(torch.zeros((1,)).to(lamb.device), -2. * lamb) t = interpolate_fn(log_alpha.reshape((-1, 1)), torch.flip(self.log_alpha_array.to(lamb.device), [1]), torch.flip(self.t_array.to(lamb.device), [1])) return t.reshape((-1,)) else: log_alpha = -0.5 * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) t_fn = lambda log_alpha_t: torch.arccos(torch.exp(log_alpha_t + self.cosine_log_alpha_0)) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s t = t_fn(log_alpha) return t def model_wrapper( model, noise_schedule, model_type="noise", model_kwargs={}, guidance_type="uncond", condition=None, unconditional_condition=None, guidance_scale=1., classifier_fn=None, classifier_kwargs={}, ): """Create a wrapper function for the noise prediction model. DPM-Solver needs to solve the continuous-time diffusion ODEs. 
For DPMs trained on discrete-time labels, we need to firstly wrap the model function to a noise prediction model that accepts the continuous time as the input. We support four types of the diffusion model by setting `model_type`: 1. "noise": noise prediction model. (Trained by predicting noise). 2. "x_start": data prediction model. (Trained by predicting the data x_0 at time 0). 3. "v": velocity prediction model. (Trained by predicting the velocity). The "v" prediction is derivation detailed in Appendix D of [1], and is used in Imagen-Video [2]. [1] Salimans, Tim, and Jonathan Ho. "Progressive distillation for fast sampling of diffusion models." arXiv preprint arXiv:2202.00512 (2022). [2] Ho, Jonathan, et al. "Imagen Video: High Definition Video Generation with Diffusion Models." arXiv preprint arXiv:2210.02303 (2022). 4. "score": marginal score function. (Trained by denoising score matching). Note that the score function and the noise prediction model follows a simple relationship: ``` noise(x_t, t) = -sigma_t * score(x_t, t) ``` We support three types of guided sampling by DPMs by setting `guidance_type`: 1. "uncond": unconditional sampling by DPMs. The input `model` has the following format: `` model(x, t_input, **model_kwargs) -> noise | x_start | v | score `` 2. "classifier": classifier guidance sampling [3] by DPMs and another classifier. The input `model` has the following format: `` model(x, t_input, **model_kwargs) -> noise | x_start | v | score `` The input `classifier_fn` has the following format: `` classifier_fn(x, t_input, cond, **classifier_kwargs) -> logits(x, t_input, cond) `` [3] P. Dhariwal and A. Q. Nichol, "Diffusion models beat GANs on image synthesis," in Advances in Neural Information Processing Systems, vol. 34, 2021, pp. 8780-8794. 3. "classifier-free": classifier-free guidance sampling by conditional DPMs. The input `model` has the following format: `` model(x, t_input, cond, **model_kwargs) -> noise | x_start | v | score `` And if cond == `unconditional_condition`, the model output is the unconditional DPM output. [4] Ho, Jonathan, and Tim Salimans. "Classifier-free diffusion guidance." arXiv preprint arXiv:2207.12598 (2022). The `t_input` is the time label of the model, which may be discrete-time labels (i.e. 0 to 999) or continuous-time labels (i.e. epsilon to T). We wrap the model function to accept only `x` and `t_continuous` as inputs, and outputs the predicted noise: `` def model_fn(x, t_continuous) -> noise: t_input = get_model_input_time(t_continuous) return noise_pred(model, x, t_input, **model_kwargs) `` where `t_continuous` is the continuous time labels (i.e. epsilon to T). And we use `model_fn` for DPM-Solver. =============================================================== Args: model: A diffusion model with the corresponding format described above. noise_schedule: A noise schedule object, such as NoiseScheduleVP. model_type: A `str`. The parameterization type of the diffusion model. "noise" or "x_start" or "v" or "score". model_kwargs: A `dict`. A dict for the other inputs of the model function. guidance_type: A `str`. The type of the guidance for sampling. "uncond" or "classifier" or "classifier-free". condition: A pytorch tensor. The condition for the guided sampling. Only used for "classifier" or "classifier-free" guidance type. unconditional_condition: A pytorch tensor. The condition for the unconditional sampling. Only used for "classifier-free" guidance type. guidance_scale: A `float`. The scale for the guided sampling. 
classifier_fn: A classifier function. Only used for the classifier guidance. classifier_kwargs: A `dict`. A dict for the other inputs of the classifier function. Returns: A noise prediction model that accepts the noised data and the continuous time as the inputs. """ def get_model_input_time(t_continuous): """ Convert the continuous-time `t_continuous` (in [epsilon, T]) to the model input time. For discrete-time DPMs, we convert `t_continuous` in [1 / N, 1] to `t_input` in [0, 1000 * (N - 1) / N]. For continuous-time DPMs, we just use `t_continuous`. """ if noise_schedule.schedule == 'discrete': return (t_continuous - 1. / noise_schedule.total_N) * 1000. else: return t_continuous def noise_pred_fn(x, t_continuous, cond=None): if t_continuous.reshape((-1,)).shape[0] == 1: t_continuous = t_continuous.expand((x.shape[0])) t_input = get_model_input_time(t_continuous) output = model(x, t_input, **model_kwargs) if model_type == "noise": return output elif model_type == "x_start": alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous) dims = x.dim() return (x - expand_dims(alpha_t, dims) * output) / expand_dims(sigma_t, dims) elif model_type == "v": alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous) dims = x.dim() return expand_dims(alpha_t, dims) * output + expand_dims(sigma_t, dims) * x elif model_type == "score": sigma_t = noise_schedule.marginal_std(t_continuous) dims = x.dim() return -expand_dims(sigma_t, dims) * output def cond_grad_fn(x, t_input): """ Compute the gradient of the classifier, i.e. nabla_{x} log p_t(cond | x_t). """ with torch.enable_grad(): x_in = x.detach().requires_grad_(True) log_prob = classifier_fn(x_in, t_input, condition, **classifier_kwargs) return torch.autograd.grad(log_prob.sum(), x_in)[0] def model_fn(x, t_continuous): """ The noise predicition model function that is used for DPM-Solver. """ if t_continuous.reshape((-1,)).shape[0] == 1: t_continuous = t_continuous.expand((x.shape[0])) if guidance_type == "uncond": return noise_pred_fn(x, t_continuous) elif guidance_type == "classifier": assert classifier_fn is not None t_input = get_model_input_time(t_continuous) cond_grad = cond_grad_fn(x, t_input) sigma_t = noise_schedule.marginal_std(t_continuous) noise = noise_pred_fn(x, t_continuous) return noise - guidance_scale * expand_dims(sigma_t, dims=cond_grad.dim()) * cond_grad elif guidance_type == "classifier-free": if guidance_scale == 1. or unconditional_condition is None: return noise_pred_fn(x, t_continuous, cond=condition) else: x_in = torch.cat([x] * 2) t_in = torch.cat([t_continuous] * 2) c_in = torch.cat([unconditional_condition, condition]) noise_uncond, noise = noise_pred_fn(x_in, t_in, cond=c_in).chunk(2) return noise_uncond + guidance_scale * (noise - noise_uncond) assert model_type in ["noise", "x_start", "v"] assert guidance_type in ["uncond", "classifier", "classifier-free"] return model_fn class UniPC: def __init__( self, model_fn, noise_schedule, predict_x0=True, thresholding=False, max_val=1., variant='bh1', ): """Construct a UniPC. We support both data_prediction and noise_prediction. """ self.model = model_fn self.noise_schedule = noise_schedule self.variant = variant self.predict_x0 = predict_x0 self.thresholding = thresholding self.max_val = max_val def dynamic_thresholding_fn(self, x0, t=None): """ The dynamic thresholding method. 
""" dims = x0.dim() p = self.dynamic_thresholding_ratio s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1) s = expand_dims(torch.maximum(s, self.thresholding_max_val * torch.ones_like(s).to(s.device)), dims) x0 = torch.clamp(x0, -s, s) / s return x0 def noise_prediction_fn(self, x, t): """ Return the noise prediction model. """ return self.model(x, t) def data_prediction_fn(self, x, t): """ Return the data prediction model (with thresholding). """ noise = self.noise_prediction_fn(x, t) dims = x.dim() alpha_t, sigma_t = self.noise_schedule.marginal_alpha(t), self.noise_schedule.marginal_std(t) x0 = (x - expand_dims(sigma_t, dims) * noise) / expand_dims(alpha_t, dims) if self.thresholding: p = 0.995 s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1) s = expand_dims(torch.maximum(s, self.max_val * torch.ones_like(s).to(s.device)), dims) x0 = torch.clamp(x0, -s, s) / s return x0 def model_fn(self, x, t): """ Convert the model to the noise prediction model or the data prediction model. """ if self.predict_x0: return self.data_prediction_fn(x, t) else: return self.noise_prediction_fn(x, t) def get_time_steps(self, skip_type, t_T, t_0, N, device): """Compute the intermediate time steps for sampling. """ if skip_type == 'logSNR': lambda_T = self.noise_schedule.marginal_lambda(torch.tensor(t_T).to(device)) lambda_0 = self.noise_schedule.marginal_lambda(torch.tensor(t_0).to(device)) logSNR_steps = torch.linspace(lambda_T.cpu().item(), lambda_0.cpu().item(), N + 1).to(device) return self.noise_schedule.inverse_lambda(logSNR_steps) elif skip_type == 'time_uniform': return torch.linspace(t_T, t_0, N + 1).to(device) elif skip_type == 'time_quadratic': t_order = 2 t = torch.linspace(t_T**(1. / t_order), t_0**(1. / t_order), N + 1).pow(t_order).to(device) return t else: raise ValueError("Unsupported skip_type {}, need to be 'logSNR' or 'time_uniform' or 'time_quadratic'".format(skip_type)) def get_orders_and_timesteps_for_singlestep_solver(self, steps, order, skip_type, t_T, t_0, device): """ Get the order of each step for sampling by the singlestep DPM-Solver. """ if order == 3: K = steps if steps % 3 == 0: orders = [3,] * (K - 2) + [2, 1] elif steps % 3 == 1: orders = [3,] * (K - 1) + [1] else: orders = [3,] * (K - 1) + [2] elif order == 2: if steps % 2 == 0: K = steps orders = [2,] * K else: K = steps orders = [2,] * (K - 1) + [1] elif order == 1: K = steps orders = [1,] * steps else: raise ValueError("'order' must be '1' or '2' or '3'.") if skip_type == 'logSNR': timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, K, device) else: timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, steps, device)[torch.cumsum(torch.tensor([0,] + orders), 0).to(device)] return timesteps_outer, orders def denoise_to_zero_fn(self, x, s): """ Denoise at the final step, which is equivalent to solve the ODE from lambda_s to infty by first-order discretization. 
""" return self.data_prediction_fn(x, s) def multistep_uni_pc_update(self, x, model_prev_list, t_prev_list, t, order, **kwargs): if len(t.shape) == 0: t = t.view(-1) if 'bh' in self.variant: return self.multistep_uni_pc_bh_update(x, model_prev_list, t_prev_list, t, order, **kwargs) else: assert self.variant == 'vary_coeff' return self.multistep_uni_pc_vary_update(x, model_prev_list, t_prev_list, t, order, **kwargs) def multistep_uni_pc_vary_update(self, x, model_prev_list, t_prev_list, t, order, use_corrector=True): print(f'using unified predictor-corrector with order {order} (solver type: vary coeff)') ns = self.noise_schedule assert order <= len(model_prev_list) t_prev_0 = t_prev_list[-1] lambda_prev_0 = ns.marginal_lambda(t_prev_0) lambda_t = ns.marginal_lambda(t) model_prev_0 = model_prev_list[-1] sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) log_alpha_t = ns.marginal_log_mean_coeff(t) alpha_t = torch.exp(log_alpha_t) h = lambda_t - lambda_prev_0 rks = [] D1s = [] for i in range(1, order): t_prev_i = t_prev_list[-(i + 1)] model_prev_i = model_prev_list[-(i + 1)] lambda_prev_i = ns.marginal_lambda(t_prev_i) rk = (lambda_prev_i - lambda_prev_0) / h rks.append(rk) D1s.append((model_prev_i - model_prev_0) / rk) rks.append(1.) rks = torch.tensor(rks, device=x.device) K = len(rks) C = [] col = torch.ones_like(rks) for k in range(1, K + 1): C.append(col) col = col * rks / (k + 1) C = torch.stack(C, dim=1) if len(D1s) > 0: D1s = torch.stack(D1s, dim=1) C_inv_p = torch.linalg.inv(C[:-1, :-1]) A_p = C_inv_p if use_corrector: print('using corrector') C_inv = torch.linalg.inv(C) A_c = C_inv hh = -h if self.predict_x0 else h h_phi_1 = torch.expm1(hh) h_phi_ks = [] factorial_k = 1 h_phi_k = h_phi_1 for k in range(1, K + 2): h_phi_ks.append(h_phi_k) h_phi_k = h_phi_k / hh - 1 / factorial_k factorial_k *= (k + 1) model_t = None if self.predict_x0: x_t_ = ( sigma_t / sigma_prev_0 * x - alpha_t * h_phi_1 * model_prev_0 ) x_t = x_t_ if len(D1s) > 0: for k in range(K - 1): x_t = x_t - alpha_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_p[k]) if use_corrector: model_t = self.model_fn(x_t, t) D1_t = (model_t - model_prev_0) x_t = x_t_ k = 0 for k in range(K - 1): x_t = x_t - alpha_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_c[k][:-1]) x_t = x_t - alpha_t * h_phi_ks[K] * (D1_t * A_c[k][-1]) else: log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) x_t_ = ( (torch.exp(log_alpha_t - log_alpha_prev_0)) * x - (sigma_t * h_phi_1) * model_prev_0 ) x_t = x_t_ if len(D1s) > 0: for k in range(K - 1): x_t = x_t - sigma_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_p[k]) if use_corrector: model_t = self.model_fn(x_t, t) D1_t = (model_t - model_prev_0) x_t = x_t_ k = 0 for k in range(K - 1): x_t = x_t - sigma_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_c[k][:-1]) x_t = x_t - sigma_t * h_phi_ks[K] * (D1_t * A_c[k][-1]) return x_t, model_t def multistep_uni_pc_bh_update(self, x, model_prev_list, t_prev_list, t, order, x_t=None, use_corrector=True): ns = self.noise_schedule assert order <= len(model_prev_list) dims = x.dim() t_prev_0 = t_prev_list[-1] lambda_prev_0 = ns.marginal_lambda(t_prev_0) lambda_t = ns.marginal_lambda(t) model_prev_0 = model_prev_list[-1] sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) alpha_t = torch.exp(log_alpha_t) h = lambda_t - lambda_prev_0 rks = 
[] D1s = [] for i in range(1, order): t_prev_i = t_prev_list[-(i + 1)] model_prev_i = model_prev_list[-(i + 1)] lambda_prev_i = ns.marginal_lambda(t_prev_i) rk = ((lambda_prev_i - lambda_prev_0) / h)[0] rks.append(rk) D1s.append((model_prev_i - model_prev_0) / rk) rks.append(1.) rks = torch.tensor(rks, device=x.device) R = [] b = [] hh = -h[0] if self.predict_x0 else h[0] h_phi_1 = torch.expm1(hh) h_phi_k = h_phi_1 / hh - 1 factorial_i = 1 if self.variant == 'bh1': B_h = hh elif self.variant == 'bh2': B_h = torch.expm1(hh) else: raise NotImplementedError() for i in range(1, order + 1): R.append(torch.pow(rks, i - 1)) b.append(h_phi_k * factorial_i / B_h) factorial_i *= (i + 1) h_phi_k = h_phi_k / hh - 1 / factorial_i R = torch.stack(R) b = torch.tensor(b, device=x.device) use_predictor = len(D1s) > 0 and x_t is None if len(D1s) > 0: D1s = torch.stack(D1s, dim=1) if x_t is None: if order == 2: rhos_p = torch.tensor([0.5], device=b.device) else: rhos_p = torch.linalg.solve(R[:-1, :-1], b[:-1]) else: D1s = None if use_corrector: if order == 1: rhos_c = torch.tensor([0.5], device=b.device) else: rhos_c = torch.linalg.solve(R, b) model_t = None if self.predict_x0: x_t_ = ( expand_dims(sigma_t / sigma_prev_0, dims) * x - expand_dims(alpha_t * h_phi_1, dims)* model_prev_0 ) if x_t is None: if use_predictor: pred_res = torch.einsum('k,bkchw->bchw', rhos_p, D1s) else: pred_res = 0 x_t = x_t_ - expand_dims(alpha_t * B_h, dims) * pred_res if use_corrector: model_t = self.model_fn(x_t, t) if D1s is not None: corr_res = torch.einsum('k,bkchw->bchw', rhos_c[:-1], D1s) else: corr_res = 0 D1_t = (model_t - model_prev_0) x_t = x_t_ - expand_dims(alpha_t * B_h, dims) * (corr_res + rhos_c[-1] * D1_t) else: x_t_ = ( expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x - expand_dims(sigma_t * h_phi_1, dims) * model_prev_0 ) if x_t is None: if use_predictor: pred_res = torch.einsum('k,bkchw->bchw', rhos_p, D1s) else: pred_res = 0 x_t = x_t_ - expand_dims(sigma_t * B_h, dims) * pred_res if use_corrector: model_t = self.model_fn(x_t, t) if D1s is not None: corr_res = torch.einsum('k,bkchw->bchw', rhos_c[:-1], D1s) else: corr_res = 0 D1_t = (model_t - model_prev_0) x_t = x_t_ - expand_dims(sigma_t * B_h, dims) * (corr_res + rhos_c[-1] * D1_t) return x_t, model_t def sample(self, x, timesteps, t_start=None, t_end=None, order=3, skip_type='time_uniform', method='singlestep', lower_order_final=True, denoise_to_zero=False, solver_type='dpm_solver', atol=0.0078, rtol=0.05, corrector=False, callback=None, disable_pbar=False ): device = x.device steps = len(timesteps) - 1 if method == 'multistep': assert steps >= order assert timesteps.shape[0] - 1 == steps for step_index in trange(steps, disable=disable_pbar): if step_index == 0: vec_t = timesteps[0].expand((x.shape[0])) model_prev_list = [self.model_fn(x, vec_t)] t_prev_list = [vec_t] elif step_index < order: init_order = step_index vec_t = timesteps[init_order].expand(x.shape[0]) x, model_x = self.multistep_uni_pc_update(x, model_prev_list, t_prev_list, vec_t, init_order, use_corrector=True) if model_x is None: model_x = self.model_fn(x, vec_t) model_prev_list.append(model_x) t_prev_list.append(vec_t) else: extra_final_step = 0 if step_index == (steps - 1): extra_final_step = 1 for step in range(step_index, step_index + 1 + extra_final_step): vec_t = timesteps[step].expand(x.shape[0]) if lower_order_final: step_order = min(order, steps + 1 - step) else: step_order = order if step == steps: use_corrector = False else: use_corrector = True x, model_x = 
self.multistep_uni_pc_update(x, model_prev_list, t_prev_list, vec_t, step_order, use_corrector=use_corrector) for i in range(order - 1): t_prev_list[i] = t_prev_list[i + 1] model_prev_list[i] = model_prev_list[i + 1] t_prev_list[-1] = vec_t if step < steps: if model_x is None: model_x = self.model_fn(x, vec_t) model_prev_list[-1] = model_x if callback is not None: callback({'x': x, 'i': step_index, 'denoised': model_prev_list[-1]}) else: raise NotImplementedError() return x def interpolate_fn(x, xp, yp): """ A piecewise linear function y = f(x), using xp and yp as keypoints. We implement f(x) in a differentiable way (i.e. applicable for autograd). The function f(x) is well-defined for all x-axis. (For x beyond the bounds of xp, we use the outmost points of xp to define the linear function.) Args: x: PyTorch tensor with shape [N, C], where N is the batch size, C is the number of channels (we use C = 1 for DPM-Solver). xp: PyTorch tensor with shape [C, K], where K is the number of keypoints. yp: PyTorch tensor with shape [C, K]. Returns: The function values f(x), with shape [N, C]. """ N, K = x.shape[0], xp.shape[1] all_x = torch.cat([x.unsqueeze(2), xp.unsqueeze(0).repeat((N, 1, 1))], dim=2) sorted_all_x, x_indices = torch.sort(all_x, dim=2) x_idx = torch.argmin(x_indices, dim=2) cand_start_idx = x_idx - 1 start_idx = torch.where( torch.eq(x_idx, 0), torch.tensor(1, device=x.device), torch.where( torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx, ), ) end_idx = torch.where(torch.eq(start_idx, cand_start_idx), start_idx + 2, start_idx + 1) start_x = torch.gather(sorted_all_x, dim=2, index=start_idx.unsqueeze(2)).squeeze(2) end_x = torch.gather(sorted_all_x, dim=2, index=end_idx.unsqueeze(2)).squeeze(2) start_idx2 = torch.where( torch.eq(x_idx, 0), torch.tensor(0, device=x.device), torch.where( torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx, ), ) y_positions_expanded = yp.unsqueeze(0).expand(N, -1, -1) start_y = torch.gather(y_positions_expanded, dim=2, index=start_idx2.unsqueeze(2)).squeeze(2) end_y = torch.gather(y_positions_expanded, dim=2, index=(start_idx2 + 1).unsqueeze(2)).squeeze(2) cand = start_y + (x - start_x) * (end_y - start_y) / (end_x - start_x) return cand def expand_dims(v, dims): """ Expand the tensor `v` to the dim `dims`. Args: `v`: a PyTorch tensor with shape [N]. `dim`: a `int`. Returns: a PyTorch tensor with shape [N, 1, 1, ..., 1] and the total dimension is `dims`. """ return v[(...,) + (None,)*(dims - 1)] class SigmaConvert: schedule = "" def marginal_log_mean_coeff(self, sigma): return 0.5 * torch.log(1 / ((sigma * sigma) + 1)) def marginal_alpha(self, t): return torch.exp(self.marginal_log_mean_coeff(t)) def marginal_std(self, t): return torch.sqrt(1. - torch.exp(2. * self.marginal_log_mean_coeff(t))) def marginal_lambda(self, t): """ Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T]. """ log_mean_coeff = self.marginal_log_mean_coeff(t) log_std = 0.5 * torch.log(1. - torch.exp(2. 
* log_mean_coeff)) return log_mean_coeff - log_std def predict_eps_sigma(model, input, sigma_in, **kwargs): sigma = sigma_in.view(sigma_in.shape[:1] + (1,) * (input.ndim - 1)) input = input * ((sigma ** 2 + 1.0) ** 0.5) return (input - model(input, sigma_in, **kwargs)) / sigma def sample_unipc(model, noise, sigmas, extra_args=None, callback=None, disable=False, variant='bh1'): timesteps = sigmas.clone() if sigmas[-1] == 0: timesteps[-1] = 0.001 ns = SigmaConvert() noise = noise / torch.sqrt(1.0 + timesteps[0] ** 2.0) model_type = "noise" model_fn = model_wrapper( lambda input, sigma, **kwargs: predict_eps_sigma(model, input, sigma, **kwargs), ns, model_type=model_type, guidance_type="uncond", model_kwargs=extra_args, ) order = min(3, len(timesteps) - 2) uni_pc = UniPC(model_fn, ns, predict_x0=True, thresholding=False, variant=variant) x = uni_pc.sample(noise, timesteps=timesteps, skip_type="time_uniform", method="multistep", order=order, lower_order_final=True, callback=callback, disable_pbar=disable) x /= ns.marginal_alpha(timesteps[-1]) return x def sample_unipc_bh2(model, noise, sigmas, extra_args=None, callback=None, disable=False): return sample_unipc(model, noise, sigmas, extra_args, callback, disable, variant='bh2')
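# ---------------------------------------------------------------------------
# Hedged usage sketch (added for illustration; not part of the original module).
# sample_unipc() above wraps a sigma-parameterized denoiser into a noise-prediction
# function via predict_eps_sigma/model_wrapper (with SigmaConvert as the schedule)
# and then runs the multistep UniPC solver. The toy denoiser, tensor shape and sigma
# range below are placeholders chosen only to keep the sketch self-contained; starting
# from Gaussian noise scaled by the first sigma is one common convention, not a
# requirement imposed by this module.
def _unipc_usage_sketch():
    def toy_denoiser(x, sigma, **kwargs):
        # Stand-in for a trained denoiser model(x, sigma) -> denoised sample.
        return x / (1.0 + sigma.view(-1, *([1] * (x.ndim - 1))) ** 2)

    sigmas = torch.cat([torch.linspace(14.6, 0.03, 12), torch.zeros(1)])  # schedule ending at 0
    noise = torch.randn(1, 4, 32, 32) * sigmas[0]
    # extra_args must be a dict; it is forwarded as **model_kwargs inside model_wrapper.
    return sample_unipc(toy_denoiser, noise, sigmas, extra_args={}, variant='bh2')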
import math from scipy import integrate import torch from torch import nn import torchsde from tqdm.auto import trange, tqdm from . import utils def append_zero(x): return torch.cat([x, x.new_zeros([1])]) def get_sigmas_karras(n, sigma_min, sigma_max, rho=7., device='cpu'): """Constructs the noise schedule of Karras et al. (2022).""" ramp = torch.linspace(0, 1, n, device=device) min_inv_rho = sigma_min ** (1 / rho) max_inv_rho = sigma_max ** (1 / rho) sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho return append_zero(sigmas).to(device) def get_sigmas_exponential(n, sigma_min, sigma_max, device='cpu'): """Constructs an exponential noise schedule.""" sigmas = torch.linspace(math.log(sigma_max), math.log(sigma_min), n, device=device).exp() return append_zero(sigmas) def get_sigmas_polyexponential(n, sigma_min, sigma_max, rho=1., device='cpu'): """Constructs an polynomial in log sigma noise schedule.""" ramp = torch.linspace(1, 0, n, device=device) ** rho sigmas = torch.exp(ramp * (math.log(sigma_max) - math.log(sigma_min)) + math.log(sigma_min)) return append_zero(sigmas) def get_sigmas_vp(n, beta_d=19.9, beta_min=0.1, eps_s=1e-3, device='cpu'): """Constructs a continuous VP noise schedule.""" t = torch.linspace(1, eps_s, n, device=device) sigmas = torch.sqrt(torch.exp(beta_d * t ** 2 / 2 + beta_min * t) - 1) return append_zero(sigmas) def to_d(x, sigma, denoised): """Converts a denoiser output to a Karras ODE derivative.""" return (x - denoised) / utils.append_dims(sigma, x.ndim) def get_ancestral_step(sigma_from, sigma_to, eta=1.): """Calculates the noise level (sigma_down) to step down to and the amount of noise to add (sigma_up) when doing an ancestral sampling step.""" if not eta: return sigma_to, 0. sigma_up = min(sigma_to, eta * (sigma_to ** 2 * (sigma_from ** 2 - sigma_to ** 2) / sigma_from ** 2) ** 0.5) sigma_down = (sigma_to ** 2 - sigma_up ** 2) ** 0.5 return sigma_down, sigma_up def default_noise_sampler(x): return lambda sigma, sigma_next: torch.randn_like(x) class BatchedBrownianTree: """A wrapper around torchsde.BrownianTree that enables batches of entropy.""" def __init__(self, x, t0, t1, seed=None, **kwargs): self.cpu_tree = True if "cpu" in kwargs: self.cpu_tree = kwargs.pop("cpu") t0, t1, self.sign = self.sort(t0, t1) w0 = kwargs.get('w0', torch.zeros_like(x)) if seed is None: seed = torch.randint(0, 2 ** 63 - 1, []).item() self.batched = True try: assert len(seed) == x.shape[0] w0 = w0[0] except TypeError: seed = [seed] self.batched = False if self.cpu_tree: self.trees = [torchsde.BrownianTree(t0.cpu(), w0.cpu(), t1.cpu(), entropy=s, **kwargs) for s in seed] else: self.trees = [torchsde.BrownianTree(t0, w0, t1, entropy=s, **kwargs) for s in seed] @staticmethod def sort(a, b): return (a, b, 1) if a < b else (b, a, -1) def __call__(self, t0, t1): t0, t1, sign = self.sort(t0, t1) if self.cpu_tree: w = torch.stack([tree(t0.cpu().float(), t1.cpu().float()).to(t0.dtype).to(t0.device) for tree in self.trees]) * (self.sign * sign) else: w = torch.stack([tree(t0, t1) for tree in self.trees]) * (self.sign * sign) return w if self.batched else w[0] class BrownianTreeNoiseSampler: """A noise sampler backed by a torchsde.BrownianTree. Args: x (Tensor): The tensor whose shape, device and dtype to use to generate random samples. sigma_min (float): The low end of the valid interval. sigma_max (float): The high end of the valid interval. seed (int or List[int]): The random seed. 
If a list of seeds is supplied instead of a single integer, then the noise sampler will use one BrownianTree per batch item, each with its own seed. transform (callable): A function that maps sigma to the sampler's internal timestep. """ def __init__(self, x, sigma_min, sigma_max, seed=None, transform=lambda x: x, cpu=False): self.transform = transform t0, t1 = self.transform(torch.as_tensor(sigma_min)), self.transform(torch.as_tensor(sigma_max)) self.tree = BatchedBrownianTree(x, t0, t1, seed, cpu=cpu) def __call__(self, sigma, sigma_next): t0, t1 = self.transform(torch.as_tensor(sigma)), self.transform(torch.as_tensor(sigma_next)) return self.tree(t0, t1) / (t1 - t0).abs().sqrt() @torch.no_grad() def sample_euler(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.): """Implements Algorithm 2 (Euler steps) from Karras et al. (2022).""" extra_args = {} if extra_args is None else extra_args s_in = x.new_ones([x.shape[0]]) for i in trange(len(sigmas) - 1, disable=disable): if s_churn > 0: gamma = min(s_churn / (len(sigmas) - 1), 2 ** 0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0. sigma_hat = sigmas[i] * (gamma + 1) else: gamma = 0 sigma_hat = sigmas[i] if gamma > 0: eps = torch.randn_like(x) * s_noise x = x + eps * (sigma_hat ** 2 - sigmas[i] ** 2) ** 0.5 denoised = model(x, sigma_hat * s_in, **extra_args) d = to_d(x, sigma_hat, denoised) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) dt = sigmas[i + 1] - sigma_hat x = x + d * dt return x @torch.no_grad() def sample_euler_ancestral(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): """Ancestral sampling with Euler method steps.""" extra_args = {} if extra_args is None else extra_args noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler s_in = x.new_ones([x.shape[0]]) for i in trange(len(sigmas) - 1, disable=disable): denoised = model(x, sigmas[i] * s_in, **extra_args) sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) d = to_d(x, sigmas[i], denoised) dt = sigma_down - sigmas[i] x = x + d * dt if sigmas[i + 1] > 0: x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up return x @torch.no_grad() def sample_heun(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.): """Implements Algorithm 2 (Heun steps) from Karras et al. (2022).""" extra_args = {} if extra_args is None else extra_args s_in = x.new_ones([x.shape[0]]) for i in trange(len(sigmas) - 1, disable=disable): if s_churn > 0: gamma = min(s_churn / (len(sigmas) - 1), 2 ** 0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0. 
sigma_hat = sigmas[i] * (gamma + 1) else: gamma = 0 sigma_hat = sigmas[i] sigma_hat = sigmas[i] * (gamma + 1) if gamma > 0: eps = torch.randn_like(x) * s_noise x = x + eps * (sigma_hat ** 2 - sigmas[i] ** 2) ** 0.5 denoised = model(x, sigma_hat * s_in, **extra_args) d = to_d(x, sigma_hat, denoised) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) dt = sigmas[i + 1] - sigma_hat if sigmas[i + 1] == 0: x = x + d * dt else: x_2 = x + d * dt denoised_2 = model(x_2, sigmas[i + 1] * s_in, **extra_args) d_2 = to_d(x_2, sigmas[i + 1], denoised_2) d_prime = (d + d_2) / 2 x = x + d_prime * dt return x @torch.no_grad() def sample_dpm_2(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.): """A sampler inspired by DPM-Solver-2 and Algorithm 2 from Karras et al. (2022).""" extra_args = {} if extra_args is None else extra_args s_in = x.new_ones([x.shape[0]]) for i in trange(len(sigmas) - 1, disable=disable): if s_churn > 0: gamma = min(s_churn / (len(sigmas) - 1), 2 ** 0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0. sigma_hat = sigmas[i] * (gamma + 1) else: gamma = 0 sigma_hat = sigmas[i] if gamma > 0: eps = torch.randn_like(x) * s_noise x = x + eps * (sigma_hat ** 2 - sigmas[i] ** 2) ** 0.5 denoised = model(x, sigma_hat * s_in, **extra_args) d = to_d(x, sigma_hat, denoised) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) if sigmas[i + 1] == 0: dt = sigmas[i + 1] - sigma_hat x = x + d * dt else: sigma_mid = sigma_hat.log().lerp(sigmas[i + 1].log(), 0.5).exp() dt_1 = sigma_mid - sigma_hat dt_2 = sigmas[i + 1] - sigma_hat x_2 = x + d * dt_1 denoised_2 = model(x_2, sigma_mid * s_in, **extra_args) d_2 = to_d(x_2, sigma_mid, denoised_2) x = x + d_2 * dt_2 return x @torch.no_grad() def sample_dpm_2_ancestral(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): """Ancestral sampling with DPM-Solver second-order steps.""" extra_args = {} if extra_args is None else extra_args noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler s_in = x.new_ones([x.shape[0]]) for i in trange(len(sigmas) - 1, disable=disable): denoised = model(x, sigmas[i] * s_in, **extra_args) sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) d = to_d(x, sigmas[i], denoised) if sigma_down == 0: dt = sigma_down - sigmas[i] x = x + d * dt else: sigma_mid = sigmas[i].log().lerp(sigma_down.log(), 0.5).exp() dt_1 = sigma_mid - sigmas[i] dt_2 = sigma_down - sigmas[i] x_2 = x + d * dt_1 denoised_2 = model(x_2, sigma_mid * s_in, **extra_args) d_2 = to_d(x_2, sigma_mid, denoised_2) x = x + d_2 * dt_2 x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up return x def linear_multistep_coeff(order, t, i, j): if order - 1 > i: raise ValueError(f'Order {order} too high for step {i}') def fn(tau): prod = 1. 
for k in range(order): if j == k: continue prod *= (tau - t[i - k]) / (t[i - j] - t[i - k]) return prod return integrate.quad(fn, t[i], t[i + 1], epsrel=1e-4)[0] @torch.no_grad() def sample_lms(model, x, sigmas, extra_args=None, callback=None, disable=None, order=4): extra_args = {} if extra_args is None else extra_args s_in = x.new_ones([x.shape[0]]) sigmas_cpu = sigmas.detach().cpu().numpy() ds = [] for i in trange(len(sigmas) - 1, disable=disable): denoised = model(x, sigmas[i] * s_in, **extra_args) d = to_d(x, sigmas[i], denoised) ds.append(d) if len(ds) > order: ds.pop(0) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) cur_order = min(i + 1, order) coeffs = [linear_multistep_coeff(cur_order, sigmas_cpu, i, j) for j in range(cur_order)] x = x + sum(coeff * d for coeff, d in zip(coeffs, reversed(ds))) return x class PIDStepSizeController: """A PID controller for ODE adaptive step size control.""" def __init__(self, h, pcoeff, icoeff, dcoeff, order=1, accept_safety=0.81, eps=1e-8): self.h = h self.b1 = (pcoeff + icoeff + dcoeff) / order self.b2 = -(pcoeff + 2 * dcoeff) / order self.b3 = dcoeff / order self.accept_safety = accept_safety self.eps = eps self.errs = [] def limiter(self, x): return 1 + math.atan(x - 1) def propose_step(self, error): inv_error = 1 / (float(error) + self.eps) if not self.errs: self.errs = [inv_error, inv_error, inv_error] self.errs[0] = inv_error factor = self.errs[0] ** self.b1 * self.errs[1] ** self.b2 * self.errs[2] ** self.b3 factor = self.limiter(factor) accept = factor >= self.accept_safety if accept: self.errs[2] = self.errs[1] self.errs[1] = self.errs[0] self.h *= factor return accept class DPMSolver(nn.Module): """DPM-Solver. See https: def __init__(self, model, extra_args=None, eps_callback=None, info_callback=None): super().__init__() self.model = model self.extra_args = {} if extra_args is None else extra_args self.eps_callback = eps_callback self.info_callback = info_callback def t(self, sigma): return -sigma.log() def sigma(self, t): return t.neg().exp() def eps(self, eps_cache, key, x, t, *args, **kwargs): if key in eps_cache: return eps_cache[key], eps_cache sigma = self.sigma(t) * x.new_ones([x.shape[0]]) eps = (x - self.model(x, sigma, *args, **self.extra_args, **kwargs)) / self.sigma(t) if self.eps_callback is not None: self.eps_callback() return eps, {key: eps, **eps_cache} def dpm_solver_1_step(self, x, t, t_next, eps_cache=None): eps_cache = {} if eps_cache is None else eps_cache h = t_next - t eps, eps_cache = self.eps(eps_cache, 'eps', x, t) x_1 = x - self.sigma(t_next) * h.expm1() * eps return x_1, eps_cache def dpm_solver_2_step(self, x, t, t_next, r1=1 / 2, eps_cache=None): eps_cache = {} if eps_cache is None else eps_cache h = t_next - t eps, eps_cache = self.eps(eps_cache, 'eps', x, t) s1 = t + r1 * h u1 = x - self.sigma(s1) * (r1 * h).expm1() * eps eps_r1, eps_cache = self.eps(eps_cache, 'eps_r1', u1, s1) x_2 = x - self.sigma(t_next) * h.expm1() * eps - self.sigma(t_next) / (2 * r1) * h.expm1() * (eps_r1 - eps) return x_2, eps_cache def dpm_solver_3_step(self, x, t, t_next, r1=1 / 3, r2=2 / 3, eps_cache=None): eps_cache = {} if eps_cache is None else eps_cache h = t_next - t eps, eps_cache = self.eps(eps_cache, 'eps', x, t) s1 = t + r1 * h s2 = t + r2 * h u1 = x - self.sigma(s1) * (r1 * h).expm1() * eps eps_r1, eps_cache = self.eps(eps_cache, 'eps_r1', u1, s1) u2 = x - self.sigma(s2) * (r2 * h).expm1() * eps - self.sigma(s2) * (r2 / r1) * ((r2 * h).expm1() / (r2 
* h) - 1) * (eps_r1 - eps) eps_r2, eps_cache = self.eps(eps_cache, 'eps_r2', u2, s2) x_3 = x - self.sigma(t_next) * h.expm1() * eps - self.sigma(t_next) / r2 * (h.expm1() / h - 1) * (eps_r2 - eps) return x_3, eps_cache def dpm_solver_fast(self, x, t_start, t_end, nfe, eta=0., s_noise=1., noise_sampler=None): noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler if not t_end > t_start and eta: raise ValueError('eta must be 0 for reverse sampling') m = math.floor(nfe / 3) + 1 ts = torch.linspace(t_start, t_end, m + 1, device=x.device) if nfe % 3 == 0: orders = [3] * (m - 2) + [2, 1] else: orders = [3] * (m - 1) + [nfe % 3] for i in range(len(orders)): eps_cache = {} t, t_next = ts[i], ts[i + 1] if eta: sd, su = get_ancestral_step(self.sigma(t), self.sigma(t_next), eta) t_next_ = torch.minimum(t_end, self.t(sd)) su = (self.sigma(t_next) ** 2 - self.sigma(t_next_) ** 2) ** 0.5 else: t_next_, su = t_next, 0. eps, eps_cache = self.eps(eps_cache, 'eps', x, t) denoised = x - self.sigma(t) * eps if self.info_callback is not None: self.info_callback({'x': x, 'i': i, 't': ts[i], 't_up': t, 'denoised': denoised}) if orders[i] == 1: x, eps_cache = self.dpm_solver_1_step(x, t, t_next_, eps_cache=eps_cache) elif orders[i] == 2: x, eps_cache = self.dpm_solver_2_step(x, t, t_next_, eps_cache=eps_cache) else: x, eps_cache = self.dpm_solver_3_step(x, t, t_next_, eps_cache=eps_cache) x = x + su * s_noise * noise_sampler(self.sigma(t), self.sigma(t_next)) return x def dpm_solver_adaptive(self, x, t_start, t_end, order=3, rtol=0.05, atol=0.0078, h_init=0.05, pcoeff=0., icoeff=1., dcoeff=0., accept_safety=0.81, eta=0., s_noise=1., noise_sampler=None): noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler if order not in {2, 3}: raise ValueError('order should be 2 or 3') forward = t_end > t_start if not forward and eta: raise ValueError('eta must be 0 for reverse sampling') h_init = abs(h_init) * (1 if forward else -1) atol = torch.tensor(atol) rtol = torch.tensor(rtol) s = t_start x_prev = x accept = True pid = PIDStepSizeController(h_init, pcoeff, icoeff, dcoeff, 1.5 if eta else order, accept_safety) info = {'steps': 0, 'nfe': 0, 'n_accept': 0, 'n_reject': 0} while s < t_end - 1e-5 if forward else s > t_end + 1e-5: eps_cache = {} t = torch.minimum(t_end, s + pid.h) if forward else torch.maximum(t_end, s + pid.h) if eta: sd, su = get_ancestral_step(self.sigma(s), self.sigma(t), eta) t_ = torch.minimum(t_end, self.t(sd)) su = (self.sigma(t) ** 2 - self.sigma(t_) ** 2) ** 0.5 else: t_, su = t, 0. 
eps, eps_cache = self.eps(eps_cache, 'eps', x, s) denoised = x - self.sigma(s) * eps if order == 2: x_low, eps_cache = self.dpm_solver_1_step(x, s, t_, eps_cache=eps_cache) x_high, eps_cache = self.dpm_solver_2_step(x, s, t_, eps_cache=eps_cache) else: x_low, eps_cache = self.dpm_solver_2_step(x, s, t_, r1=1 / 3, eps_cache=eps_cache) x_high, eps_cache = self.dpm_solver_3_step(x, s, t_, eps_cache=eps_cache) delta = torch.maximum(atol, rtol * torch.maximum(x_low.abs(), x_prev.abs())) error = torch.linalg.norm((x_low - x_high) / delta) / x.numel() ** 0.5 accept = pid.propose_step(error) if accept: x_prev = x_low x = x_high + su * s_noise * noise_sampler(self.sigma(s), self.sigma(t)) s = t info['n_accept'] += 1 else: info['n_reject'] += 1 info['nfe'] += order info['steps'] += 1 if self.info_callback is not None: self.info_callback({'x': x, 'i': info['steps'] - 1, 't': s, 't_up': s, 'denoised': denoised, 'error': error, 'h': pid.h, **info}) return x, info @torch.no_grad() def sample_dpm_fast(model, x, sigma_min, sigma_max, n, extra_args=None, callback=None, disable=None, eta=0., s_noise=1., noise_sampler=None): """DPM-Solver-Fast (fixed step size). See https: if sigma_min <= 0 or sigma_max <= 0: raise ValueError('sigma_min and sigma_max must not be 0') with tqdm(total=n, disable=disable) as pbar: dpm_solver = DPMSolver(model, extra_args, eps_callback=pbar.update) if callback is not None: dpm_solver.info_callback = lambda info: callback({'sigma': dpm_solver.sigma(info['t']), 'sigma_hat': dpm_solver.sigma(info['t_up']), **info}) return dpm_solver.dpm_solver_fast(x, dpm_solver.t(torch.tensor(sigma_max)), dpm_solver.t(torch.tensor(sigma_min)), n, eta, s_noise, noise_sampler) @torch.no_grad() def sample_dpm_adaptive(model, x, sigma_min, sigma_max, extra_args=None, callback=None, disable=None, order=3, rtol=0.05, atol=0.0078, h_init=0.05, pcoeff=0., icoeff=1., dcoeff=0., accept_safety=0.81, eta=0., s_noise=1., noise_sampler=None, return_info=False): """DPM-Solver-12 and 23 (adaptive step size). 
See https: if sigma_min <= 0 or sigma_max <= 0: raise ValueError('sigma_min and sigma_max must not be 0') with tqdm(disable=disable) as pbar: dpm_solver = DPMSolver(model, extra_args, eps_callback=pbar.update) if callback is not None: dpm_solver.info_callback = lambda info: callback({'sigma': dpm_solver.sigma(info['t']), 'sigma_hat': dpm_solver.sigma(info['t_up']), **info}) x, info = dpm_solver.dpm_solver_adaptive(x, dpm_solver.t(torch.tensor(sigma_max)), dpm_solver.t(torch.tensor(sigma_min)), order, rtol, atol, h_init, pcoeff, icoeff, dcoeff, accept_safety, eta, s_noise, noise_sampler) if return_info: return x, info return x @torch.no_grad() def sample_dpmpp_2s_ancestral(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): """Ancestral sampling with DPM-Solver++(2S) second-order steps.""" extra_args = {} if extra_args is None else extra_args noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler s_in = x.new_ones([x.shape[0]]) sigma_fn = lambda t: t.neg().exp() t_fn = lambda sigma: sigma.log().neg() for i in trange(len(sigmas) - 1, disable=disable): denoised = model(x, sigmas[i] * s_in, **extra_args) sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) if sigma_down == 0: d = to_d(x, sigmas[i], denoised) dt = sigma_down - sigmas[i] x = x + d * dt else: t, t_next = t_fn(sigmas[i]), t_fn(sigma_down) r = 1 / 2 h = t_next - t s = t + r * h x_2 = (sigma_fn(s) / sigma_fn(t)) * x - (-h * r).expm1() * denoised denoised_2 = model(x_2, sigma_fn(s) * s_in, **extra_args) x = (sigma_fn(t_next) / sigma_fn(t)) * x - (-h).expm1() * denoised_2 if sigmas[i + 1] > 0: x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up return x @torch.no_grad() def sample_dpmpp_sde(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None, r=1 / 2): """DPM-Solver++ (stochastic).""" if len(sigmas) <= 1: return x sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() seed = extra_args.get("seed", None) noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=seed, cpu=True) if noise_sampler is None else noise_sampler extra_args = {} if extra_args is None else extra_args s_in = x.new_ones([x.shape[0]]) sigma_fn = lambda t: t.neg().exp() t_fn = lambda sigma: sigma.log().neg() for i in trange(len(sigmas) - 1, disable=disable): denoised = model(x, sigmas[i] * s_in, **extra_args) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) if sigmas[i + 1] == 0: d = to_d(x, sigmas[i], denoised) dt = sigmas[i + 1] - sigmas[i] x = x + d * dt else: t, t_next = t_fn(sigmas[i]), t_fn(sigmas[i + 1]) h = t_next - t s = t + h * r fac = 1 / (2 * r) sd, su = get_ancestral_step(sigma_fn(t), sigma_fn(s), eta) s_ = t_fn(sd) x_2 = (sigma_fn(s_) / sigma_fn(t)) * x - (t - s_).expm1() * denoised x_2 = x_2 + noise_sampler(sigma_fn(t), sigma_fn(s)) * s_noise * su denoised_2 = model(x_2, sigma_fn(s) * s_in, **extra_args) sd, su = get_ancestral_step(sigma_fn(t), sigma_fn(t_next), eta) t_next_ = t_fn(sd) denoised_d = (1 - fac) * denoised + fac * denoised_2 x = (sigma_fn(t_next_) / sigma_fn(t)) * x - (t - t_next_).expm1() * denoised_d x = x + noise_sampler(sigma_fn(t), sigma_fn(t_next)) * s_noise * su return x @torch.no_grad() def sample_dpmpp_2m(model, x, sigmas, extra_args=None, callback=None, 
disable=None): """DPM-Solver++(2M).""" extra_args = {} if extra_args is None else extra_args s_in = x.new_ones([x.shape[0]]) sigma_fn = lambda t: t.neg().exp() t_fn = lambda sigma: sigma.log().neg() old_denoised = None for i in trange(len(sigmas) - 1, disable=disable): denoised = model(x, sigmas[i] * s_in, **extra_args) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) t, t_next = t_fn(sigmas[i]), t_fn(sigmas[i + 1]) h = t_next - t if old_denoised is None or sigmas[i + 1] == 0: x = (sigma_fn(t_next) / sigma_fn(t)) * x - (-h).expm1() * denoised else: h_last = t - t_fn(sigmas[i - 1]) r = h_last / h denoised_d = (1 + 1 / (2 * r)) * denoised - (1 / (2 * r)) * old_denoised x = (sigma_fn(t_next) / sigma_fn(t)) * x - (-h).expm1() * denoised_d old_denoised = denoised return x @torch.no_grad() def sample_dpmpp_2m_sde(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None, solver_type='midpoint'): """DPM-Solver++(2M) SDE.""" if len(sigmas) <= 1: return x if solver_type not in {'heun', 'midpoint'}: raise ValueError('solver_type must be \'heun\' or \'midpoint\'') seed = extra_args.get("seed", None) sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=seed, cpu=True) if noise_sampler is None else noise_sampler extra_args = {} if extra_args is None else extra_args s_in = x.new_ones([x.shape[0]]) old_denoised = None h_last = None h = None for i in trange(len(sigmas) - 1, disable=disable): denoised = model(x, sigmas[i] * s_in, **extra_args) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) if sigmas[i + 1] == 0: x = denoised else: t, s = -sigmas[i].log(), -sigmas[i + 1].log() h = s - t eta_h = eta * h x = sigmas[i + 1] / sigmas[i] * (-eta_h).exp() * x + (-h - eta_h).expm1().neg() * denoised if old_denoised is not None: r = h_last / h if solver_type == 'heun': x = x + ((-h - eta_h).expm1().neg() / (-h - eta_h) + 1) * (1 / r) * (denoised - old_denoised) elif solver_type == 'midpoint': x = x + 0.5 * (-h - eta_h).expm1().neg() * (1 / r) * (denoised - old_denoised) if eta: x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * sigmas[i + 1] * (-2 * eta_h).expm1().neg().sqrt() * s_noise old_denoised = denoised h_last = h return x @torch.no_grad() def sample_dpmpp_3m_sde(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): """DPM-Solver++(3M) SDE.""" if len(sigmas) <= 1: return x seed = extra_args.get("seed", None) sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=seed, cpu=True) if noise_sampler is None else noise_sampler extra_args = {} if extra_args is None else extra_args s_in = x.new_ones([x.shape[0]]) denoised_1, denoised_2 = None, None h, h_1, h_2 = None, None, None for i in trange(len(sigmas) - 1, disable=disable): denoised = model(x, sigmas[i] * s_in, **extra_args) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) if sigmas[i + 1] == 0: x = denoised else: t, s = -sigmas[i].log(), -sigmas[i + 1].log() h = s - t h_eta = h * (eta + 1) x = torch.exp(-h_eta) * x + (-h_eta).expm1().neg() * denoised if h_2 is not None: r0 = h_1 / h r1 = h_2 / h d1_0 = (denoised - denoised_1) / r0 d1_1 = (denoised_1 - denoised_2) / r1 d1 = d1_0 + (d1_0 - d1_1) * r0 / (r0 + 
r1) d2 = (d1_0 - d1_1) / (r0 + r1) phi_2 = h_eta.neg().expm1() / h_eta + 1 phi_3 = phi_2 / h_eta - 0.5 x = x + phi_2 * d1 - phi_3 * d2 elif h_1 is not None: r = h_1 / h d = (denoised - denoised_1) / r phi_2 = h_eta.neg().expm1() / h_eta + 1 x = x + phi_2 * d if eta: x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * sigmas[i + 1] * (-2 * h * eta).expm1().neg().sqrt() * s_noise denoised_1, denoised_2 = denoised, denoised_1 h_1, h_2 = h, h_1 return x @torch.no_grad() def sample_dpmpp_3m_sde_gpu(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): if len(sigmas) <= 1: return x sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=extra_args.get("seed", None), cpu=False) if noise_sampler is None else noise_sampler return sample_dpmpp_3m_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler) @torch.no_grad() def sample_dpmpp_2m_sde_gpu(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None, solver_type='midpoint'): if len(sigmas) <= 1: return x sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=extra_args.get("seed", None), cpu=False) if noise_sampler is None else noise_sampler return sample_dpmpp_2m_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler, solver_type=solver_type) @torch.no_grad() def sample_dpmpp_sde_gpu(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None, r=1 / 2): if len(sigmas) <= 1: return x sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=extra_args.get("seed", None), cpu=False) if noise_sampler is None else noise_sampler return sample_dpmpp_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler, r=r) def DDPMSampler_step(x, sigma, sigma_prev, noise, noise_sampler): alpha_cumprod = 1 / ((sigma * sigma) + 1) alpha_cumprod_prev = 1 / ((sigma_prev * sigma_prev) + 1) alpha = (alpha_cumprod / alpha_cumprod_prev) mu = (1.0 / alpha).sqrt() * (x - (1 - alpha) * noise / (1 - alpha_cumprod).sqrt()) if sigma_prev > 0: mu += ((1 - alpha) * (1. - alpha_cumprod_prev) / (1. 
- alpha_cumprod)).sqrt() * noise_sampler(sigma, sigma_prev) return mu def generic_step_sampler(model, x, sigmas, extra_args=None, callback=None, disable=None, noise_sampler=None, step_function=None): extra_args = {} if extra_args is None else extra_args noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler s_in = x.new_ones([x.shape[0]]) for i in trange(len(sigmas) - 1, disable=disable): denoised = model(x, sigmas[i] * s_in, **extra_args) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) x = step_function(x / torch.sqrt(1.0 + sigmas[i] ** 2.0), sigmas[i], sigmas[i + 1], (x - denoised) / sigmas[i], noise_sampler) if sigmas[i + 1] != 0: x *= torch.sqrt(1.0 + sigmas[i + 1] ** 2.0) return x @torch.no_grad() def sample_ddpm(model, x, sigmas, extra_args=None, callback=None, disable=None, noise_sampler=None): return generic_step_sampler(model, x, sigmas, extra_args, callback, disable, noise_sampler, DDPMSampler_step) @torch.no_grad() def sample_lcm(model, x, sigmas, extra_args=None, callback=None, disable=None, noise_sampler=None): extra_args = {} if extra_args is None else extra_args noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler s_in = x.new_ones([x.shape[0]]) for i in trange(len(sigmas) - 1, disable=disable): denoised = model(x, sigmas[i] * s_in, **extra_args) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) x = denoised if sigmas[i + 1] > 0: x = model.inner_model.inner_model.model_sampling.noise_scaling(sigmas[i + 1], noise_sampler(sigmas[i], sigmas[i + 1]), x) return x @torch.no_grad() def sample_heunpp2(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.): extra_args = {} if extra_args is None else extra_args s_in = x.new_ones([x.shape[0]]) s_end = sigmas[-1] for i in trange(len(sigmas) - 1, disable=disable): gamma = min(s_churn / (len(sigmas) - 1), 2 ** 0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0. 
eps = torch.randn_like(x) * s_noise sigma_hat = sigmas[i] * (gamma + 1) if gamma > 0: x = x + eps * (sigma_hat ** 2 - sigmas[i] ** 2) ** 0.5 denoised = model(x, sigma_hat * s_in, **extra_args) d = to_d(x, sigma_hat, denoised) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) dt = sigmas[i + 1] - sigma_hat if sigmas[i + 1] == s_end: x = x + d * dt elif sigmas[i + 2] == s_end: x_2 = x + d * dt denoised_2 = model(x_2, sigmas[i + 1] * s_in, **extra_args) d_2 = to_d(x_2, sigmas[i + 1], denoised_2) w = 2 * sigmas[0] w2 = sigmas[i+1]/w w1 = 1 - w2 d_prime = d * w1 + d_2 * w2 x = x + d_prime * dt else: x_2 = x + d * dt denoised_2 = model(x_2, sigmas[i + 1] * s_in, **extra_args) d_2 = to_d(x_2, sigmas[i + 1], denoised_2) dt_2 = sigmas[i + 2] - sigmas[i + 1] x_3 = x_2 + d_2 * dt_2 denoised_3 = model(x_3, sigmas[i + 2] * s_in, **extra_args) d_3 = to_d(x_3, sigmas[i + 2], denoised_3) w = 3 * sigmas[0] w2 = sigmas[i + 1] / w w3 = sigmas[i + 2] / w w1 = 1 - w2 - w3 d_prime = w1 * d + w2 * d_2 + w3 * d_3 x = x + d_prime * dt return x def sample_ipndm(model, x, sigmas, extra_args=None, callback=None, disable=None, max_order=4): extra_args = {} if extra_args is None else extra_args s_in = x.new_ones([x.shape[0]]) x_next = x buffer_model = [] for i in trange(len(sigmas) - 1, disable=disable): t_cur = sigmas[i] t_next = sigmas[i + 1] x_cur = x_next denoised = model(x_cur, t_cur * s_in, **extra_args) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) d_cur = (x_cur - denoised) / t_cur order = min(max_order, i+1) if order == 1: x_next = x_cur + (t_next - t_cur) * d_cur elif order == 2: x_next = x_cur + (t_next - t_cur) * (3 * d_cur - buffer_model[-1]) / 2 elif order == 3: x_next = x_cur + (t_next - t_cur) * (23 * d_cur - 16 * buffer_model[-1] + 5 * buffer_model[-2]) / 12 elif order == 4: x_next = x_cur + (t_next - t_cur) * (55 * d_cur - 59 * buffer_model[-1] + 37 * buffer_model[-2] - 9 * buffer_model[-3]) / 24 if len(buffer_model) == max_order - 1: for k in range(max_order - 2): buffer_model[k] = buffer_model[k+1] buffer_model[-1] = d_cur else: buffer_model.append(d_cur) return x_next def sample_ipndm_v(model, x, sigmas, extra_args=None, callback=None, disable=None, max_order=4): extra_args = {} if extra_args is None else extra_args s_in = x.new_ones([x.shape[0]]) x_next = x t_steps = sigmas buffer_model = [] for i in trange(len(sigmas) - 1, disable=disable): t_cur = sigmas[i] t_next = sigmas[i + 1] x_cur = x_next denoised = model(x_cur, t_cur * s_in, **extra_args) if callback is not None: callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) d_cur = (x_cur - denoised) / t_cur order = min(max_order, i+1) if order == 1: x_next = x_cur + (t_next - t_cur) * d_cur elif order == 2: h_n = (t_next - t_cur) h_n_1 = (t_cur - t_steps[i-1]) coeff1 = (2 + (h_n / h_n_1)) / 2 coeff2 = -(h_n / h_n_1) / 2 x_next = x_cur + (t_next - t_cur) * (coeff1 * d_cur + coeff2 * buffer_model[-1]) elif order == 3: h_n = (t_next - t_cur) h_n_1 = (t_cur - t_steps[i-1]) h_n_2 = (t_steps[i-1] - t_steps[i-2]) temp = (1 - h_n / (3 * (h_n + h_n_1)) * (h_n * (h_n + h_n_1)) / (h_n_1 * (h_n_1 + h_n_2))) / 2 coeff1 = (2 + (h_n / h_n_1)) / 2 + temp coeff2 = -(h_n / h_n_1) / 2 - (1 + h_n_1 / h_n_2) * temp coeff3 = temp * h_n_1 / h_n_2 x_next = x_cur + (t_next - t_cur) * (coeff1 * d_cur + coeff2 * buffer_model[-1] + coeff3 * buffer_model[-2]) elif order == 4: h_n = (t_next - t_cur) 
h_n_1 = (t_cur - t_steps[i-1]) h_n_2 = (t_steps[i-1] - t_steps[i-2]) h_n_3 = (t_steps[i-2] - t_steps[i-3]) temp1 = (1 - h_n / (3 * (h_n + h_n_1)) * (h_n * (h_n + h_n_1)) / (h_n_1 * (h_n_1 + h_n_2))) / 2 temp2 = ((1 - h_n / (3 * (h_n + h_n_1))) / 2 + (1 - h_n / (2 * (h_n + h_n_1))) * h_n / (6 * (h_n + h_n_1 + h_n_2))) \ * (h_n * (h_n + h_n_1) * (h_n + h_n_1 + h_n_2)) / (h_n_1 * (h_n_1 + h_n_2) * (h_n_1 + h_n_2 + h_n_3)) coeff1 = (2 + (h_n / h_n_1)) / 2 + temp1 + temp2 coeff2 = -(h_n / h_n_1) / 2 - (1 + h_n_1 / h_n_2) * temp1 - (1 + (h_n_1 / h_n_2) + (h_n_1 * (h_n_1 + h_n_2) / (h_n_2 * (h_n_2 + h_n_3)))) * temp2 coeff3 = temp1 * h_n_1 / h_n_2 + ((h_n_1 / h_n_2) + (h_n_1 * (h_n_1 + h_n_2) / (h_n_2 * (h_n_2 + h_n_3))) * (1 + h_n_2 / h_n_3)) * temp2 coeff4 = -temp2 * (h_n_1 * (h_n_1 + h_n_2) / (h_n_2 * (h_n_2 + h_n_3))) * h_n_1 / h_n_2 x_next = x_cur + (t_next - t_cur) * (coeff1 * d_cur + coeff2 * buffer_model[-1] + coeff3 * buffer_model[-2] + coeff4 * buffer_model[-3]) if len(buffer_model) == max_order - 1: for k in range(max_order - 2): buffer_model[k] = buffer_model[k+1] buffer_model[-1] = d_cur.detach() else: buffer_model.append(d_cur.detach()) return x_next
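# ---------------------------------------------------------------------------
# Hedged usage sketch (added for illustration; not part of the original module).
# The samplers above share one calling convention: model(x, sigma * s_in, **extra_args)
# returns the denoised prediction, `sigmas` is a decreasing schedule that usually ends
# at 0, and sampling starts from noise scaled to sigmas[0]. The toy denoiser and shapes
# below are placeholders so the sketch stays self-contained and runnable.
def _k_diffusion_usage_sketch(device='cpu'):
    def toy_denoiser(x, sigma, **kwargs):
        # Stand-in for a trained Karras-style denoiser D(x, sigma).
        return x / (1.0 + utils.append_dims(sigma, x.ndim) ** 2)

    sigmas = get_sigmas_karras(n=20, sigma_min=0.03, sigma_max=14.6, device=device)
    x = torch.randn(1, 4, 32, 32, device=device) * sigmas[0]
    x_ode = sample_euler(toy_denoiser, x.clone(), sigmas)                     # deterministic Euler steps
    x_anc = sample_euler_ancestral(toy_denoiser, x.clone(), sigmas, eta=1.0)  # stochastic/ancestral variant
    return x_ode, x_anc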
from contextlib import contextmanager import hashlib import math from pathlib import Path import shutil import urllib import warnings from PIL import Image import torch from torch import nn, optim from torch.utils import data def hf_datasets_augs_helper(examples, transform, image_key, mode='RGB'): """Apply passed in transforms for HuggingFace Datasets.""" images = [transform(image.convert(mode)) for image in examples[image_key]] return {image_key: images} def append_dims(x, target_dims): """Appends dimensions to the end of a tensor until it has target_dims dimensions.""" dims_to_append = target_dims - x.ndim if dims_to_append < 0: raise ValueError(f'input has {x.ndim} dims but target_dims is {target_dims}, which is less') expanded = x[(...,) + (None,) * dims_to_append] return expanded.detach().clone() if expanded.device.type == 'mps' else expanded def n_params(module): """Returns the number of trainable parameters in a module.""" return sum(p.numel() for p in module.parameters()) def download_file(path, url, digest=None): """Downloads a file if it does not exist, optionally checking its SHA-256 hash.""" path = Path(path) path.parent.mkdir(parents=True, exist_ok=True) if not path.exists(): with urllib.request.urlopen(url) as response, open(path, 'wb') as f: shutil.copyfileobj(response, f) if digest is not None: file_digest = hashlib.sha256(open(path, 'rb').read()).hexdigest() if digest != file_digest: raise OSError(f'hash of {path} (url: {url}) failed to validate') return path @contextmanager def train_mode(model, mode=True): """A context manager that places a model into training mode and restores the previous mode on exit.""" modes = [module.training for module in model.modules()] try: yield model.train(mode) finally: for i, module in enumerate(model.modules()): module.training = modes[i] def eval_mode(model): """A context manager that places a model into evaluation mode and restores the previous mode on exit.""" return train_mode(model, False) @torch.no_grad() def ema_update(model, averaged_model, decay): """Incorporates updated model parameters into an exponential moving averaged version of a model. It should be called after each optimizer step.""" model_params = dict(model.named_parameters()) averaged_params = dict(averaged_model.named_parameters()) assert model_params.keys() == averaged_params.keys() for name, param in model_params.items(): averaged_params[name].mul_(decay).add_(param, alpha=1 - decay) model_buffers = dict(model.named_buffers()) averaged_buffers = dict(averaged_model.named_buffers()) assert model_buffers.keys() == averaged_buffers.keys() for name, buf in model_buffers.items(): averaged_buffers[name].copy_(buf) class EMAWarmup: """Implements an EMA warmup using an inverse decay schedule. If inv_gamma=1 and power=1, implements a simple average. inv_gamma=1, power=2/3 are good values for models you plan to train for a million or more steps (reaches decay factor 0.999 at 31.6K steps, 0.9999 at 1M steps), inv_gamma=1, power=3/4 for models you plan to train for less (reaches decay factor 0.999 at 10K steps, 0.9999 at 215.4k steps). Args: inv_gamma (float): Inverse multiplicative factor of EMA warmup. Default: 1. power (float): Exponential factor of EMA warmup. Default: 1. min_value (float): The minimum EMA decay rate. Default: 0. max_value (float): The maximum EMA decay rate. Default: 1. start_at (int): The epoch to start averaging at. Default: 0. last_epoch (int): The index of last epoch. Default: 0. 
""" def __init__(self, inv_gamma=1., power=1., min_value=0., max_value=1., start_at=0, last_epoch=0): self.inv_gamma = inv_gamma self.power = power self.min_value = min_value self.max_value = max_value self.start_at = start_at self.last_epoch = last_epoch def state_dict(self): """Returns the state of the class as a :class:`dict`.""" return dict(self.__dict__.items()) def load_state_dict(self, state_dict): """Loads the class's state. Args: state_dict (dict): scaler state. Should be an object returned from a call to :meth:`state_dict`. """ self.__dict__.update(state_dict) def get_value(self): """Gets the current EMA decay rate.""" epoch = max(0, self.last_epoch - self.start_at) value = 1 - (1 + epoch / self.inv_gamma) ** -self.power return 0. if epoch < 0 else min(self.max_value, max(self.min_value, value)) def step(self): """Updates the step count.""" self.last_epoch += 1 class InverseLR(optim.lr_scheduler._LRScheduler): """Implements an inverse decay learning rate schedule with an optional exponential warmup. When last_epoch=-1, sets initial lr as lr. inv_gamma is the number of steps/epochs required for the learning rate to decay to (1 / 2)**power of its original value. Args: optimizer (Optimizer): Wrapped optimizer. inv_gamma (float): Inverse multiplicative factor of learning rate decay. Default: 1. power (float): Exponential factor of learning rate decay. Default: 1. warmup (float): Exponential warmup factor (0 <= warmup < 1, 0 to disable) Default: 0. min_lr (float): The minimum learning rate. Default: 0. last_epoch (int): The index of last epoch. Default: -1. verbose (bool): If ``True``, prints a message to stdout for each update. Default: ``False``. """ def __init__(self, optimizer, inv_gamma=1., power=1., warmup=0., min_lr=0., last_epoch=-1, verbose=False): self.inv_gamma = inv_gamma self.power = power if not 0. <= warmup < 1: raise ValueError('Invalid value for warmup') self.warmup = warmup self.min_lr = min_lr super().__init__(optimizer, last_epoch, verbose) def get_lr(self): if not self._get_lr_called_within_step: warnings.warn("To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.") return self._get_closed_form_lr() def _get_closed_form_lr(self): warmup = 1 - self.warmup ** (self.last_epoch + 1) lr_mult = (1 + self.last_epoch / self.inv_gamma) ** -self.power return [warmup * max(self.min_lr, base_lr * lr_mult) for base_lr in self.base_lrs] class ExponentialLR(optim.lr_scheduler._LRScheduler): """Implements an exponential learning rate schedule with an optional exponential warmup. When last_epoch=-1, sets initial lr as lr. Decays the learning rate continuously by decay (default 0.5) every num_steps steps. Args: optimizer (Optimizer): Wrapped optimizer. num_steps (float): The number of steps to decay the learning rate by decay in. decay (float): The factor by which to decay the learning rate every num_steps steps. Default: 0.5. warmup (float): Exponential warmup factor (0 <= warmup < 1, 0 to disable) Default: 0. min_lr (float): The minimum learning rate. Default: 0. last_epoch (int): The index of last epoch. Default: -1. verbose (bool): If ``True``, prints a message to stdout for each update. Default: ``False``. """ def __init__(self, optimizer, num_steps, decay=0.5, warmup=0., min_lr=0., last_epoch=-1, verbose=False): self.num_steps = num_steps self.decay = decay if not 0. 
<= warmup < 1: raise ValueError('Invalid value for warmup') self.warmup = warmup self.min_lr = min_lr super().__init__(optimizer, last_epoch, verbose) def get_lr(self): if not self._get_lr_called_within_step: warnings.warn("To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.") return self._get_closed_form_lr() def _get_closed_form_lr(self): warmup = 1 - self.warmup ** (self.last_epoch + 1) lr_mult = (self.decay ** (1 / self.num_steps)) ** self.last_epoch return [warmup * max(self.min_lr, base_lr * lr_mult) for base_lr in self.base_lrs] def rand_log_normal(shape, loc=0., scale=1., device='cpu', dtype=torch.float32): """Draws samples from an lognormal distribution.""" return (torch.randn(shape, device=device, dtype=dtype) * scale + loc).exp() def rand_log_logistic(shape, loc=0., scale=1., min_value=0., max_value=float('inf'), device='cpu', dtype=torch.float32): """Draws samples from an optionally truncated log-logistic distribution.""" min_value = torch.as_tensor(min_value, device=device, dtype=torch.float64) max_value = torch.as_tensor(max_value, device=device, dtype=torch.float64) min_cdf = min_value.log().sub(loc).div(scale).sigmoid() max_cdf = max_value.log().sub(loc).div(scale).sigmoid() u = torch.rand(shape, device=device, dtype=torch.float64) * (max_cdf - min_cdf) + min_cdf return u.logit().mul(scale).add(loc).exp().to(dtype) def rand_log_uniform(shape, min_value, max_value, device='cpu', dtype=torch.float32): """Draws samples from an log-uniform distribution.""" min_value = math.log(min_value) max_value = math.log(max_value) return (torch.rand(shape, device=device, dtype=dtype) * (max_value - min_value) + min_value).exp() def rand_v_diffusion(shape, sigma_data=1., min_value=0., max_value=float('inf'), device='cpu', dtype=torch.float32): """Draws samples from a truncated v-diffusion training timestep distribution.""" min_cdf = math.atan(min_value / sigma_data) * 2 / math.pi max_cdf = math.atan(max_value / sigma_data) * 2 / math.pi u = torch.rand(shape, device=device, dtype=dtype) * (max_cdf - min_cdf) + min_cdf return torch.tan(u * math.pi / 2) * sigma_data def rand_split_log_normal(shape, loc, scale_1, scale_2, device='cpu', dtype=torch.float32): """Draws samples from a split lognormal distribution.""" n = torch.randn(shape, device=device, dtype=dtype).abs() u = torch.rand(shape, device=device, dtype=dtype) n_left = n * -scale_1 + loc n_right = n * scale_2 + loc ratio = scale_1 / (scale_1 + scale_2) return torch.where(u < ratio, n_left, n_right).exp() class FolderOfImages(data.Dataset): """Recursively finds all images in a directory. 
It does not support classes/targets.""" IMG_EXTENSIONS = {'.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif', '.tiff', '.webp'} def __init__(self, root, transform=None): super().__init__() self.root = Path(root) self.transform = nn.Identity() if transform is None else transform self.paths = sorted(path for path in self.root.rglob('*') if path.suffix.lower() in self.IMG_EXTENSIONS) def __repr__(self): return f'FolderOfImages(root="{self.root}", len: {len(self)})' def __len__(self): return len(self.paths) def __getitem__(self, key): path = self.paths[key] with open(path, 'rb') as f: image = Image.open(f).convert('RGB') image = self.transform(image) return image, class CSVLogger: def __init__(self, filename, columns): self.filename = Path(filename) self.columns = columns if self.filename.exists(): self.file = open(self.filename, 'a') else: self.file = open(self.filename, 'w') self.write(*self.columns) def write(self, *args): print(*args, sep=',', file=self.file, flush=True) @contextmanager def tf32_mode(cudnn=None, matmul=None): """A context manager that sets whether TF32 is allowed on cuDNN or matmul.""" cudnn_old = torch.backends.cudnn.allow_tf32 matmul_old = torch.backends.cuda.matmul.allow_tf32 try: if cudnn is not None: torch.backends.cudnn.allow_tf32 = cudnn if matmul is not None: torch.backends.cuda.matmul.allow_tf32 = matmul yield finally: if cudnn is not None: torch.backends.cudnn.allow_tf32 = cudnn_old if matmul is not None: torch.backends.cuda.matmul.allow_tf32 = matmul_old
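# Usage sketch (illustrative only, not part of the original file): tf32_mode above snapshots the
# current TF32 flags, applies only the overrides that are not None, and restores the saved values
# on exit even if the body raises, so it can safely wrap a single matmul-heavy section.
def _demo_tf32_mode():
    a, b = torch.randn(64, 64), torch.randn(64, 64)
    with tf32_mode(matmul=False):   # cudnn=None leaves the cuDNN flag untouched
        c = a @ b                   # matmuls here run with TF32 disabled
    return c.shape                  # the previous allow_tf32 settings are restored here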
import importlib import torch from torch import optim import numpy as np from inspect import isfunction from PIL import Image, ImageDraw, ImageFont def log_txt_as_img(wh, xc, size=10): b = len(xc) txts = list() for bi in range(b): txt = Image.new("RGB", wh, color="white") draw = ImageDraw.Draw(txt) font = ImageFont.truetype('data/DejaVuSans.ttf', size=size) nc = int(40 * (wh[0] / 256)) lines = "\n".join(xc[bi][start:start + nc] for start in range(0, len(xc[bi]), nc)) try: draw.text((0, 0), lines, fill="black", font=font) except UnicodeEncodeError: print("Cant encode string for logging. Skipping.") txt = np.array(txt).transpose(2, 0, 1) / 127.5 - 1.0 txts.append(txt) txts = np.stack(txts) txts = torch.tensor(txts) return txts def ismap(x): if not isinstance(x, torch.Tensor): return False return (len(x.shape) == 4) and (x.shape[1] > 3) def isimage(x): if not isinstance(x,torch.Tensor): return False return (len(x.shape) == 4) and (x.shape[1] == 3 or x.shape[1] == 1) def exists(x): return x is not None def default(val, d): if exists(val): return val return d() if isfunction(d) else d def mean_flat(tensor): """ https: Take the mean over all non-batch dimensions. """ return tensor.mean(dim=list(range(1, len(tensor.shape)))) def count_params(model, verbose=False): total_params = sum(p.numel() for p in model.parameters()) if verbose: print(f"{model.__class__.__name__} has {total_params*1.e-6:.2f} M params.") return total_params def instantiate_from_config(config): if not "target" in config: if config == '__is_first_stage__': return None elif config == "__is_unconditional__": return None raise KeyError("Expected key `target` to instantiate.") return get_obj_from_str(config["target"])(**config.get("params", dict())) def get_obj_from_str(string, reload=False): module, cls = string.rsplit(".", 1) if reload: module_imp = importlib.import_module(module) importlib.reload(module_imp) return getattr(importlib.import_module(module, package=None), cls) class AdamWwithEMAandWings(optim.Optimizer): def __init__(self, params, lr=1.e-3, betas=(0.9, 0.999), eps=1.e-8, weight_decay=1.e-2, amsgrad=False, ema_decay=0.9999, ema_power=1., param_names=()): """AdamW that saves EMA versions of the parameters.""" if not 0.0 <= lr: raise ValueError("Invalid learning rate: {}".format(lr)) if not 0.0 <= eps: raise ValueError("Invalid epsilon value: {}".format(eps)) if not 0.0 <= betas[0] < 1.0: raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0])) if not 0.0 <= betas[1] < 1.0: raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1])) if not 0.0 <= weight_decay: raise ValueError("Invalid weight_decay value: {}".format(weight_decay)) if not 0.0 <= ema_decay <= 1.0: raise ValueError("Invalid ema_decay value: {}".format(ema_decay)) defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay, amsgrad=amsgrad, ema_decay=ema_decay, ema_power=ema_power, param_names=param_names) super().__init__(params, defaults) def __setstate__(self, state): super().__setstate__(state) for group in self.param_groups: group.setdefault('amsgrad', False) @torch.no_grad() def step(self, closure=None): """Performs a single optimization step. Args: closure (callable, optional): A closure that reevaluates the model and returns the loss. 
""" loss = None if closure is not None: with torch.enable_grad(): loss = closure() for group in self.param_groups: params_with_grad = [] grads = [] exp_avgs = [] exp_avg_sqs = [] ema_params_with_grad = [] state_sums = [] max_exp_avg_sqs = [] state_steps = [] amsgrad = group['amsgrad'] beta1, beta2 = group['betas'] ema_decay = group['ema_decay'] ema_power = group['ema_power'] for p in group['params']: if p.grad is None: continue params_with_grad.append(p) if p.grad.is_sparse: raise RuntimeError('AdamW does not support sparse gradients') grads.append(p.grad) state = self.state[p] if len(state) == 0: state['step'] = 0 state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format) state['exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format) if amsgrad: state['max_exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format) state['param_exp_avg'] = p.detach().float().clone() exp_avgs.append(state['exp_avg']) exp_avg_sqs.append(state['exp_avg_sq']) ema_params_with_grad.append(state['param_exp_avg']) if amsgrad: max_exp_avg_sqs.append(state['max_exp_avg_sq']) state['step'] += 1 state_steps.append(state['step']) optim._functional.adamw(params_with_grad, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, amsgrad=amsgrad, beta1=beta1, beta2=beta2, lr=group['lr'], weight_decay=group['weight_decay'], eps=group['eps'], maximize=False) cur_ema_decay = min(ema_decay, 1 - state['step'] ** -ema_power) for param, ema_param in zip(params_with_grad, ema_params_with_grad): ema_param.mul_(cur_ema_decay).add_(param.float(), alpha=1 - cur_ema_decay) return loss
import torch from torch import nn from typing import Literal, Dict, Any import math import comfy.ops ops = comfy.ops.disable_weight_init def vae_sample(mean, scale): stdev = nn.functional.softplus(scale) + 1e-4 var = stdev * stdev logvar = torch.log(var) latents = torch.randn_like(mean) * stdev + mean kl = (mean * mean + var - logvar - 1).sum(1).mean() return latents, kl class VAEBottleneck(nn.Module): def __init__(self): super().__init__() self.is_discrete = False def encode(self, x, return_info=False, **kwargs): info = {} mean, scale = x.chunk(2, dim=1) x, kl = vae_sample(mean, scale) info["kl"] = kl if return_info: return x, info else: return x def decode(self, x): return x def snake_beta(x, alpha, beta): return x + (1.0 / (beta + 0.000000001)) * pow(torch.sin(x * alpha), 2) class SnakeBeta(nn.Module): def __init__(self, in_features, alpha=1.0, alpha_trainable=True, alpha_logscale=True): super(SnakeBeta, self).__init__() self.in_features = in_features self.alpha_logscale = alpha_logscale if self.alpha_logscale: self.alpha = nn.Parameter(torch.zeros(in_features) * alpha) self.beta = nn.Parameter(torch.zeros(in_features) * alpha) else: self.alpha = nn.Parameter(torch.ones(in_features) * alpha) self.beta = nn.Parameter(torch.ones(in_features) * alpha) self.no_div_by_zero = 0.000000001 def forward(self, x): alpha = self.alpha.unsqueeze(0).unsqueeze(-1).to(x.device) beta = self.beta.unsqueeze(0).unsqueeze(-1).to(x.device) if self.alpha_logscale: alpha = torch.exp(alpha) beta = torch.exp(beta) x = snake_beta(x, alpha, beta) return x def WNConv1d(*args, **kwargs): try: return torch.nn.utils.parametrizations.weight_norm(ops.Conv1d(*args, **kwargs)) except: return torch.nn.utils.weight_norm(ops.Conv1d(*args, **kwargs)) def WNConvTranspose1d(*args, **kwargs): try: return torch.nn.utils.parametrizations.weight_norm(ops.ConvTranspose1d(*args, **kwargs)) except: return torch.nn.utils.weight_norm(ops.ConvTranspose1d(*args, **kwargs)) def get_activation(activation: Literal["elu", "snake", "none"], antialias=False, channels=None) -> nn.Module: if activation == "elu": act = torch.nn.ELU() elif activation == "snake": act = SnakeBeta(channels) elif activation == "none": act = torch.nn.Identity() else: raise ValueError(f"Unknown activation {activation}") if antialias: act = Activation1d(act) return act class ResidualUnit(nn.Module): def __init__(self, in_channels, out_channels, dilation, use_snake=False, antialias_activation=False): super().__init__() self.dilation = dilation padding = (dilation * (7-1)) self.layers = nn.Sequential( get_activation("snake" if use_snake else "elu", antialias=antialias_activation, channels=out_channels), WNConv1d(in_channels=in_channels, out_channels=out_channels, kernel_size=7, dilation=dilation, padding=padding), get_activation("snake" if use_snake else "elu", antialias=antialias_activation, channels=out_channels), WNConv1d(in_channels=out_channels, out_channels=out_channels, kernel_size=1) ) def forward(self, x): res = x x = self.layers(x) return x + res class EncoderBlock(nn.Module): def __init__(self, in_channels, out_channels, stride, use_snake=False, antialias_activation=False): super().__init__() self.layers = nn.Sequential( ResidualUnit(in_channels=in_channels, out_channels=in_channels, dilation=1, use_snake=use_snake), ResidualUnit(in_channels=in_channels, out_channels=in_channels, dilation=3, use_snake=use_snake), ResidualUnit(in_channels=in_channels, out_channels=in_channels, dilation=9, use_snake=use_snake), get_activation("snake" if use_snake else "elu", 
antialias=antialias_activation, channels=in_channels), WNConv1d(in_channels=in_channels, out_channels=out_channels, kernel_size=2*stride, stride=stride, padding=math.ceil(stride/2)), ) def forward(self, x): return self.layers(x) class DecoderBlock(nn.Module): def __init__(self, in_channels, out_channels, stride, use_snake=False, antialias_activation=False, use_nearest_upsample=False): super().__init__() if use_nearest_upsample: upsample_layer = nn.Sequential( nn.Upsample(scale_factor=stride, mode="nearest"), WNConv1d(in_channels=in_channels, out_channels=out_channels, kernel_size=2*stride, stride=1, bias=False, padding='same') ) else: upsample_layer = WNConvTranspose1d(in_channels=in_channels, out_channels=out_channels, kernel_size=2*stride, stride=stride, padding=math.ceil(stride/2)) self.layers = nn.Sequential( get_activation("snake" if use_snake else "elu", antialias=antialias_activation, channels=in_channels), upsample_layer, ResidualUnit(in_channels=out_channels, out_channels=out_channels, dilation=1, use_snake=use_snake), ResidualUnit(in_channels=out_channels, out_channels=out_channels, dilation=3, use_snake=use_snake), ResidualUnit(in_channels=out_channels, out_channels=out_channels, dilation=9, use_snake=use_snake), ) def forward(self, x): return self.layers(x) class OobleckEncoder(nn.Module): def __init__(self, in_channels=2, channels=128, latent_dim=32, c_mults = [1, 2, 4, 8], strides = [2, 4, 8, 8], use_snake=False, antialias_activation=False ): super().__init__() c_mults = [1] + c_mults self.depth = len(c_mults) layers = [ WNConv1d(in_channels=in_channels, out_channels=c_mults[0] * channels, kernel_size=7, padding=3) ] for i in range(self.depth-1): layers += [EncoderBlock(in_channels=c_mults[i]*channels, out_channels=c_mults[i+1]*channels, stride=strides[i], use_snake=use_snake)] layers += [ get_activation("snake" if use_snake else "elu", antialias=antialias_activation, channels=c_mults[-1] * channels), WNConv1d(in_channels=c_mults[-1]*channels, out_channels=latent_dim, kernel_size=3, padding=1) ] self.layers = nn.Sequential(*layers) def forward(self, x): return self.layers(x) class OobleckDecoder(nn.Module): def __init__(self, out_channels=2, channels=128, latent_dim=32, c_mults = [1, 2, 4, 8], strides = [2, 4, 8, 8], use_snake=False, antialias_activation=False, use_nearest_upsample=False, final_tanh=True): super().__init__() c_mults = [1] + c_mults self.depth = len(c_mults) layers = [ WNConv1d(in_channels=latent_dim, out_channels=c_mults[-1]*channels, kernel_size=7, padding=3), ] for i in range(self.depth-1, 0, -1): layers += [DecoderBlock( in_channels=c_mults[i]*channels, out_channels=c_mults[i-1]*channels, stride=strides[i-1], use_snake=use_snake, antialias_activation=antialias_activation, use_nearest_upsample=use_nearest_upsample ) ] layers += [ get_activation("snake" if use_snake else "elu", antialias=antialias_activation, channels=c_mults[0] * channels), WNConv1d(in_channels=c_mults[0] * channels, out_channels=out_channels, kernel_size=7, padding=3, bias=False), nn.Tanh() if final_tanh else nn.Identity() ] self.layers = nn.Sequential(*layers) def forward(self, x): return self.layers(x) class AudioOobleckVAE(nn.Module): def __init__(self, in_channels=2, channels=128, latent_dim=64, c_mults = [1, 2, 4, 8, 16], strides = [2, 4, 4, 8, 8], use_snake=True, antialias_activation=False, use_nearest_upsample=False, final_tanh=False): super().__init__() self.encoder = OobleckEncoder(in_channels, channels, latent_dim * 2, c_mults, strides, use_snake, antialias_activation) 
self.decoder = OobleckDecoder(in_channels, channels, latent_dim, c_mults, strides, use_snake, antialias_activation, use_nearest_upsample=use_nearest_upsample, final_tanh=final_tanh) self.bottleneck = VAEBottleneck() def encode(self, x): return self.bottleneck.encode(self.encoder(x)) def decode(self, x): return self.decoder(self.bottleneck.decode(x))
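# Minimal sketch (toy shapes, not from the original file): VAEBottleneck.encode splits the encoder
# output into mean/scale halves along the channel axis, samples latents with vae_sample, and can
# also return the KL term; only the shape contract is shown here.
def _demo_vae_bottleneck():
    bottleneck = VAEBottleneck()
    enc_out = torch.randn(1, 128, 10)      # 2 * latent_dim channels, 10 latent frames
    latents, info = bottleneck.encode(enc_out, return_info=True)
    return latents.shape, info["kl"]       # latents: (1, 64, 10); "kl" is a scalar tensor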
from comfy.ldm.modules.attention import optimized_attention
import typing as tp

import torch

from einops import rearrange
from torch import nn
from torch.nn import functional as F
import math

class FourierFeatures(nn.Module):
    def __init__(self, in_features, out_features, std=1., dtype=None, device=None):
        super().__init__()
        assert out_features % 2 == 0
        self.weight = nn.Parameter(torch.empty(
            [out_features // 2, in_features], dtype=dtype, device=device))

    def forward(self, input):
        f = 2 * math.pi * input @ self.weight.T.to(dtype=input.dtype, device=input.device)
        return torch.cat([f.cos(), f.sin()], dim=-1)

class LayerNorm(nn.Module):
    def __init__(self, dim, bias=False, fix_scale=False, dtype=None, device=None):
        """
        bias-less layernorm has been shown to be more stable. most newer models have moved towards rmsnorm, also bias-less
        """
        super().__init__()

        self.gamma = nn.Parameter(torch.empty(dim, dtype=dtype, device=device))

        if bias:
            self.beta = nn.Parameter(torch.empty(dim, dtype=dtype, device=device))
        else:
            self.beta = None

    def forward(self, x):
        beta = self.beta
        if self.beta is not None:
            beta = beta.to(dtype=x.dtype, device=x.device)
        return F.layer_norm(x, x.shape[-1:], weight=self.gamma.to(dtype=x.dtype, device=x.device), bias=beta)

class GLU(nn.Module):
    def __init__(
        self,
        dim_in,
        dim_out,
        activation,
        use_conv = False,
        conv_kernel_size = 3,
        dtype=None,
        device=None,
        operations=None,
    ):
        super().__init__()
        self.act = activation
        self.proj = operations.Linear(dim_in, dim_out * 2, dtype=dtype, device=device) if not use_conv else operations.Conv1d(dim_in, dim_out * 2, conv_kernel_size, padding = (conv_kernel_size // 2), dtype=dtype, device=device)
        self.use_conv = use_conv

    def forward(self, x):
        if self.use_conv:
            x = rearrange(x, 'b n d -> b d n')
            x = self.proj(x)
            x = rearrange(x, 'b d n -> b n d')
        else:
            x = self.proj(x)

        x, gate = x.chunk(2, dim = -1)
        return x * self.act(gate)

class AbsolutePositionalEmbedding(nn.Module):
    def __init__(self, dim, max_seq_len):
        super().__init__()
        self.scale = dim ** -0.5
        self.max_seq_len = max_seq_len
        self.emb = nn.Embedding(max_seq_len, dim)

    def forward(self, x, pos = None, seq_start_pos = None):
        seq_len, device = x.shape[1], x.device
        assert seq_len <= self.max_seq_len, f'you are passing in a sequence length of {seq_len} but your absolute positional embedding has a max sequence length of {self.max_seq_len}'

        if pos is None:
            pos = torch.arange(seq_len, device = device)

        if seq_start_pos is not None:
            pos = (pos - seq_start_pos[..., None]).clamp(min = 0)

        pos_emb = self.emb(pos)
        pos_emb = pos_emb * self.scale
        return pos_emb

class ScaledSinusoidalEmbedding(nn.Module):
    def __init__(self, dim, theta = 10000):
        super().__init__()
        assert (dim % 2) == 0, 'dimension must be divisible by 2'
        self.scale = nn.Parameter(torch.ones(1) * dim ** -0.5)

        half_dim = dim // 2
        freq_seq = torch.arange(half_dim).float() / half_dim
        inv_freq = theta ** -freq_seq
        self.register_buffer('inv_freq', inv_freq, persistent = False)

    def forward(self, x, pos = None, seq_start_pos = None):
        seq_len, device = x.shape[1], x.device

        if pos is None:
            pos = torch.arange(seq_len, device = device)

        if seq_start_pos is not None:
            pos = pos - seq_start_pos[..., None]

        emb = torch.einsum('i, j -> i j', pos, self.inv_freq)
        emb = torch.cat((emb.sin(), emb.cos()), dim = -1)
        return emb * self.scale

class RotaryEmbedding(nn.Module):
    def __init__(
        self,
        dim,
        use_xpos = False,
        scale_base = 512,
        interpolation_factor = 1.,
        base = 10000,
        base_rescale_factor = 1.
    ):
        super().__init__()
        base *= base_rescale_factor ** (dim / (dim - 2))

        inv_freq = 1. 
/ (base ** (torch.arange(0, dim, 2).float() / dim)) self.register_buffer('inv_freq', inv_freq) assert interpolation_factor >= 1. self.interpolation_factor = interpolation_factor if not use_xpos: self.register_buffer('scale', None) return scale = (torch.arange(0, dim, 2) + 0.4 * dim) / (1.4 * dim) self.scale_base = scale_base self.register_buffer('scale', scale) def forward_from_seq_len(self, seq_len, device, dtype): t = torch.arange(seq_len, device=device, dtype=dtype) return self.forward(t) def forward(self, t): device = t.device dtype = t.dtype t = t / self.interpolation_factor freqs = torch.einsum('i , j -> i j', t, self.inv_freq.to(dtype=dtype, device=device)) freqs = torch.cat((freqs, freqs), dim = -1) if self.scale is None: return freqs, 1. power = (torch.arange(seq_len, device = device) - (seq_len scale = self.scale.to(dtype=dtype, device=device) ** rearrange(power, 'n -> n 1') scale = torch.cat((scale, scale), dim = -1) return freqs, scale def rotate_half(x): x = rearrange(x, '... (j d) -> ... j d', j = 2) x1, x2 = x.unbind(dim = -2) return torch.cat((-x2, x1), dim = -1) def apply_rotary_pos_emb(t, freqs, scale = 1): out_dtype = t.dtype dtype = t.dtype rot_dim, seq_len = freqs.shape[-1], t.shape[-2] freqs, t = freqs.to(dtype), t.to(dtype) freqs = freqs[-seq_len:, :] if t.ndim == 4 and freqs.ndim == 3: freqs = rearrange(freqs, 'b n d -> b 1 n d') t, t_unrotated = t[..., :rot_dim], t[..., rot_dim:] t = (t * freqs.cos() * scale) + (rotate_half(t) * freqs.sin() * scale) t, t_unrotated = t.to(out_dtype), t_unrotated.to(out_dtype) return torch.cat((t, t_unrotated), dim = -1) class FeedForward(nn.Module): def __init__( self, dim, dim_out = None, mult = 4, no_bias = False, glu = True, use_conv = False, conv_kernel_size = 3, zero_init_output = True, dtype=None, device=None, operations=None, ): super().__init__() inner_dim = int(dim * mult) activation = nn.SiLU() dim_out = dim if dim_out is None else dim_out if glu: linear_in = GLU(dim, inner_dim, activation, dtype=dtype, device=device, operations=operations) else: linear_in = nn.Sequential( Rearrange('b n d -> b d n') if use_conv else nn.Identity(), operations.Linear(dim, inner_dim, bias = not no_bias, dtype=dtype, device=device) if not use_conv else operations.Conv1d(dim, inner_dim, conv_kernel_size, padding = (conv_kernel_size Rearrange('b n d -> b d n') if use_conv else nn.Identity(), activation ) linear_out = operations.Linear(inner_dim, dim_out, bias = not no_bias, dtype=dtype, device=device) if not use_conv else operations.Conv1d(inner_dim, dim_out, conv_kernel_size, padding = (conv_kernel_size self.ff = nn.Sequential( linear_in, Rearrange('b d n -> b n d') if use_conv else nn.Identity(), linear_out, Rearrange('b n d -> b d n') if use_conv else nn.Identity(), ) def forward(self, x): return self.ff(x) class Attention(nn.Module): def __init__( self, dim, dim_heads = 64, dim_context = None, causal = False, zero_init_output=True, qk_norm = False, natten_kernel_size = None, dtype=None, device=None, operations=None, ): super().__init__() self.dim = dim self.dim_heads = dim_heads self.causal = causal dim_kv = dim_context if dim_context is not None else dim self.num_heads = dim self.kv_heads = dim_kv if dim_context is not None: self.to_q = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) self.to_kv = operations.Linear(dim_kv, dim_kv * 2, bias=False, dtype=dtype, device=device) else: self.to_qkv = operations.Linear(dim, dim * 3, bias=False, dtype=dtype, device=device) self.to_out = operations.Linear(dim, dim, bias=False, 
dtype=dtype, device=device) self.qk_norm = qk_norm def forward( self, x, context = None, mask = None, context_mask = None, rotary_pos_emb = None, causal = None ): h, kv_h, has_context = self.num_heads, self.kv_heads, context is not None kv_input = context if has_context else x if hasattr(self, 'to_q'): q = self.to_q(x) q = rearrange(q, 'b n (h d) -> b h n d', h = h) k, v = self.to_kv(kv_input).chunk(2, dim=-1) k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = kv_h), (k, v)) else: q, k, v = self.to_qkv(x).chunk(3, dim=-1) q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = h), (q, k, v)) if self.qk_norm: q = F.normalize(q, dim=-1) k = F.normalize(k, dim=-1) if rotary_pos_emb is not None and not has_context: freqs, _ = rotary_pos_emb q_dtype = q.dtype k_dtype = k.dtype q = q.to(torch.float32) k = k.to(torch.float32) freqs = freqs.to(torch.float32) q = apply_rotary_pos_emb(q, freqs) k = apply_rotary_pos_emb(k, freqs) q = q.to(q_dtype) k = k.to(k_dtype) input_mask = context_mask if input_mask is None and not has_context: input_mask = mask masks = [] final_attn_mask = None if input_mask is not None: input_mask = rearrange(input_mask, 'b j -> b 1 1 j') masks.append(~input_mask) if len(masks) > 0: final_attn_mask = ~or_reduce(masks) n, device = q.shape[-2], q.device causal = self.causal if causal is None else causal if n == 1 and causal: causal = False if h != kv_h: heads_per_kv_head = h k, v = map(lambda t: t.repeat_interleave(heads_per_kv_head, dim = 1), (k, v)) out = optimized_attention(q, k, v, h, skip_reshape=True) out = self.to_out(out) if mask is not None: mask = rearrange(mask, 'b n -> b n 1') out = out.masked_fill(~mask, 0.) return out class ConformerModule(nn.Module): def __init__( self, dim, norm_kwargs = {}, ): super().__init__() self.dim = dim self.in_norm = LayerNorm(dim, **norm_kwargs) self.pointwise_conv = nn.Conv1d(dim, dim, kernel_size=1, bias=False) self.glu = GLU(dim, dim, nn.SiLU()) self.depthwise_conv = nn.Conv1d(dim, dim, kernel_size=17, groups=dim, padding=8, bias=False) self.mid_norm = LayerNorm(dim, **norm_kwargs) self.swish = nn.SiLU() self.pointwise_conv_2 = nn.Conv1d(dim, dim, kernel_size=1, bias=False) def forward(self, x): x = self.in_norm(x) x = rearrange(x, 'b n d -> b d n') x = self.pointwise_conv(x) x = rearrange(x, 'b d n -> b n d') x = self.glu(x) x = rearrange(x, 'b n d -> b d n') x = self.depthwise_conv(x) x = rearrange(x, 'b d n -> b n d') x = self.mid_norm(x) x = self.swish(x) x = rearrange(x, 'b n d -> b d n') x = self.pointwise_conv_2(x) x = rearrange(x, 'b d n -> b n d') return x class TransformerBlock(nn.Module): def __init__( self, dim, dim_heads = 64, cross_attend = False, dim_context = None, global_cond_dim = None, causal = False, zero_init_branch_outputs = True, conformer = False, layer_ix = -1, remove_norms = False, attn_kwargs = {}, ff_kwargs = {}, norm_kwargs = {}, dtype=None, device=None, operations=None, ): super().__init__() self.dim = dim self.dim_heads = dim_heads self.cross_attend = cross_attend self.dim_context = dim_context self.causal = causal self.pre_norm = LayerNorm(dim, dtype=dtype, device=device, **norm_kwargs) if not remove_norms else nn.Identity() self.self_attn = Attention( dim, dim_heads = dim_heads, causal = causal, zero_init_output=zero_init_branch_outputs, dtype=dtype, device=device, operations=operations, **attn_kwargs ) if cross_attend: self.cross_attend_norm = LayerNorm(dim, dtype=dtype, device=device, **norm_kwargs) if not remove_norms else nn.Identity() self.cross_attn = Attention( dim, 
dim_heads = dim_heads, dim_context=dim_context, causal = causal, zero_init_output=zero_init_branch_outputs, dtype=dtype, device=device, operations=operations, **attn_kwargs ) self.ff_norm = LayerNorm(dim, dtype=dtype, device=device, **norm_kwargs) if not remove_norms else nn.Identity() self.ff = FeedForward(dim, zero_init_output=zero_init_branch_outputs, dtype=dtype, device=device, operations=operations,**ff_kwargs) self.layer_ix = layer_ix self.conformer = ConformerModule(dim, norm_kwargs=norm_kwargs) if conformer else None self.global_cond_dim = global_cond_dim if global_cond_dim is not None: self.to_scale_shift_gate = nn.Sequential( nn.SiLU(), nn.Linear(global_cond_dim, dim * 6, bias=False) ) nn.init.zeros_(self.to_scale_shift_gate[1].weight) def forward( self, x, context = None, global_cond=None, mask = None, context_mask = None, rotary_pos_emb = None ): if self.global_cond_dim is not None and self.global_cond_dim > 0 and global_cond is not None: scale_self, shift_self, gate_self, scale_ff, shift_ff, gate_ff = self.to_scale_shift_gate(global_cond).unsqueeze(1).chunk(6, dim = -1) residual = x x = self.pre_norm(x) x = x * (1 + scale_self) + shift_self x = self.self_attn(x, mask = mask, rotary_pos_emb = rotary_pos_emb) x = x * torch.sigmoid(1 - gate_self) x = x + residual if context is not None: x = x + self.cross_attn(self.cross_attend_norm(x), context = context, context_mask = context_mask) if self.conformer is not None: x = x + self.conformer(x) residual = x x = self.ff_norm(x) x = x * (1 + scale_ff) + shift_ff x = self.ff(x) x = x * torch.sigmoid(1 - gate_ff) x = x + residual else: x = x + self.self_attn(self.pre_norm(x), mask = mask, rotary_pos_emb = rotary_pos_emb) if context is not None: x = x + self.cross_attn(self.cross_attend_norm(x), context = context, context_mask = context_mask) if self.conformer is not None: x = x + self.conformer(x) x = x + self.ff(self.ff_norm(x)) return x class ContinuousTransformer(nn.Module): def __init__( self, dim, depth, *, dim_in = None, dim_out = None, dim_heads = 64, cross_attend=False, cond_token_dim=None, global_cond_dim=None, causal=False, rotary_pos_emb=True, zero_init_branch_outputs=True, conformer=False, use_sinusoidal_emb=False, use_abs_pos_emb=False, abs_pos_emb_max_length=10000, dtype=None, device=None, operations=None, **kwargs ): super().__init__() self.dim = dim self.depth = depth self.causal = causal self.layers = nn.ModuleList([]) self.project_in = operations.Linear(dim_in, dim, bias=False, dtype=dtype, device=device) if dim_in is not None else nn.Identity() self.project_out = operations.Linear(dim, dim_out, bias=False, dtype=dtype, device=device) if dim_out is not None else nn.Identity() if rotary_pos_emb: self.rotary_pos_emb = RotaryEmbedding(max(dim_heads else: self.rotary_pos_emb = None self.use_sinusoidal_emb = use_sinusoidal_emb if use_sinusoidal_emb: self.pos_emb = ScaledSinusoidalEmbedding(dim) self.use_abs_pos_emb = use_abs_pos_emb if use_abs_pos_emb: self.pos_emb = AbsolutePositionalEmbedding(dim, abs_pos_emb_max_length) for i in range(depth): self.layers.append( TransformerBlock( dim, dim_heads = dim_heads, cross_attend = cross_attend, dim_context = cond_token_dim, global_cond_dim = global_cond_dim, causal = causal, zero_init_branch_outputs = zero_init_branch_outputs, conformer=conformer, layer_ix=i, dtype=dtype, device=device, operations=operations, **kwargs ) ) def forward( self, x, mask = None, prepend_embeds = None, prepend_mask = None, global_cond = None, return_info = False, **kwargs ): batch, seq, device = 
*x.shape[:2], x.device info = { "hidden_states": [], } x = self.project_in(x) if prepend_embeds is not None: prepend_length, prepend_dim = prepend_embeds.shape[1:] assert prepend_dim == x.shape[-1], 'prepend dimension must match sequence dimension' x = torch.cat((prepend_embeds, x), dim = -2) if prepend_mask is not None or mask is not None: mask = mask if mask is not None else torch.ones((batch, seq), device = device, dtype = torch.bool) prepend_mask = prepend_mask if prepend_mask is not None else torch.ones((batch, prepend_length), device = device, dtype = torch.bool) mask = torch.cat((prepend_mask, mask), dim = -1) if self.rotary_pos_emb is not None: rotary_pos_emb = self.rotary_pos_emb.forward_from_seq_len(x.shape[1], dtype=x.dtype, device=x.device) else: rotary_pos_emb = None if self.use_sinusoidal_emb or self.use_abs_pos_emb: x = x + self.pos_emb(x) for layer in self.layers: x = layer(x, rotary_pos_emb = rotary_pos_emb, global_cond=global_cond, **kwargs) if return_info: info["hidden_states"].append(x) x = self.project_out(x) if return_info: return x, info return x class AudioDiffusionTransformer(nn.Module): def __init__(self, io_channels=64, patch_size=1, embed_dim=1536, cond_token_dim=768, project_cond_tokens=False, global_cond_dim=1536, project_global_cond=True, input_concat_dim=0, prepend_cond_dim=0, depth=24, num_heads=24, transformer_type: tp.Literal["continuous_transformer"] = "continuous_transformer", global_cond_type: tp.Literal["prepend", "adaLN"] = "prepend", audio_model="", dtype=None, device=None, operations=None, **kwargs): super().__init__() self.dtype = dtype self.cond_token_dim = cond_token_dim timestep_features_dim = 256 self.timestep_features = FourierFeatures(1, timestep_features_dim, dtype=dtype, device=device) self.to_timestep_embed = nn.Sequential( operations.Linear(timestep_features_dim, embed_dim, bias=True, dtype=dtype, device=device), nn.SiLU(), operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device), ) if cond_token_dim > 0: cond_embed_dim = cond_token_dim if not project_cond_tokens else embed_dim self.to_cond_embed = nn.Sequential( operations.Linear(cond_token_dim, cond_embed_dim, bias=False, dtype=dtype, device=device), nn.SiLU(), operations.Linear(cond_embed_dim, cond_embed_dim, bias=False, dtype=dtype, device=device) ) else: cond_embed_dim = 0 if global_cond_dim > 0: global_embed_dim = global_cond_dim if not project_global_cond else embed_dim self.to_global_embed = nn.Sequential( operations.Linear(global_cond_dim, global_embed_dim, bias=False, dtype=dtype, device=device), nn.SiLU(), operations.Linear(global_embed_dim, global_embed_dim, bias=False, dtype=dtype, device=device) ) if prepend_cond_dim > 0: self.to_prepend_embed = nn.Sequential( operations.Linear(prepend_cond_dim, embed_dim, bias=False, dtype=dtype, device=device), nn.SiLU(), operations.Linear(embed_dim, embed_dim, bias=False, dtype=dtype, device=device) ) self.input_concat_dim = input_concat_dim dim_in = io_channels + self.input_concat_dim self.patch_size = patch_size self.transformer_type = transformer_type self.global_cond_type = global_cond_type if self.transformer_type == "continuous_transformer": global_dim = None if self.global_cond_type == "adaLN": global_dim = embed_dim self.transformer = ContinuousTransformer( dim=embed_dim, depth=depth, dim_heads=embed_dim dim_in=dim_in * patch_size, dim_out=io_channels * patch_size, cross_attend = cond_token_dim > 0, cond_token_dim = cond_embed_dim, global_cond_dim=global_dim, dtype=dtype, device=device, 
operations=operations, **kwargs ) else: raise ValueError(f"Unknown transformer type: {self.transformer_type}") self.preprocess_conv = operations.Conv1d(dim_in, dim_in, 1, bias=False, dtype=dtype, device=device) self.postprocess_conv = operations.Conv1d(io_channels, io_channels, 1, bias=False, dtype=dtype, device=device) def _forward( self, x, t, mask=None, cross_attn_cond=None, cross_attn_cond_mask=None, input_concat_cond=None, global_embed=None, prepend_cond=None, prepend_cond_mask=None, return_info=False, **kwargs): if cross_attn_cond is not None: cross_attn_cond = self.to_cond_embed(cross_attn_cond) if global_embed is not None: global_embed = self.to_global_embed(global_embed) prepend_inputs = None prepend_mask = None prepend_length = 0 if prepend_cond is not None: prepend_cond = self.to_prepend_embed(prepend_cond) prepend_inputs = prepend_cond if prepend_cond_mask is not None: prepend_mask = prepend_cond_mask if input_concat_cond is not None: if input_concat_cond.shape[2] != x.shape[2]: input_concat_cond = F.interpolate(input_concat_cond, (x.shape[2], ), mode='nearest') x = torch.cat([x, input_concat_cond], dim=1) timestep_embed = self.to_timestep_embed(self.timestep_features(t[:, None]).to(x.dtype)) if global_embed is not None: global_embed = global_embed + timestep_embed else: global_embed = timestep_embed if self.global_cond_type == "prepend": if prepend_inputs is None: prepend_inputs = global_embed.unsqueeze(1) prepend_mask = torch.ones((x.shape[0], 1), device=x.device, dtype=torch.bool) else: prepend_inputs = torch.cat([prepend_inputs, global_embed.unsqueeze(1)], dim=1) prepend_mask = torch.cat([prepend_mask, torch.ones((x.shape[0], 1), device=x.device, dtype=torch.bool)], dim=1) prepend_length = prepend_inputs.shape[1] x = self.preprocess_conv(x) + x x = rearrange(x, "b c t -> b t c") extra_args = {} if self.global_cond_type == "adaLN": extra_args["global_cond"] = global_embed if self.patch_size > 1: x = rearrange(x, "b (t p) c -> b t (c p)", p=self.patch_size) if self.transformer_type == "x-transformers": output = self.transformer(x, prepend_embeds=prepend_inputs, context=cross_attn_cond, context_mask=cross_attn_cond_mask, mask=mask, prepend_mask=prepend_mask, **extra_args, **kwargs) elif self.transformer_type == "continuous_transformer": output = self.transformer(x, prepend_embeds=prepend_inputs, context=cross_attn_cond, context_mask=cross_attn_cond_mask, mask=mask, prepend_mask=prepend_mask, return_info=return_info, **extra_args, **kwargs) if return_info: output, info = output elif self.transformer_type == "mm_transformer": output = self.transformer(x, context=cross_attn_cond, mask=mask, context_mask=cross_attn_cond_mask, **extra_args, **kwargs) output = rearrange(output, "b t c -> b c t")[:,:,prepend_length:] if self.patch_size > 1: output = rearrange(output, "b (c p) t -> b c (t p)", p=self.patch_size) output = self.postprocess_conv(output) + output if return_info: return output, info return output def forward( self, x, timestep, context=None, context_mask=None, input_concat_cond=None, global_embed=None, negative_global_embed=None, prepend_cond=None, prepend_cond_mask=None, mask=None, return_info=False, control=None, transformer_options={}, **kwargs): return self._forward( x, timestep, cross_attn_cond=context, cross_attn_cond_mask=context_mask, input_concat_cond=input_concat_cond, global_embed=global_embed, prepend_cond=prepend_cond, prepend_cond_mask=prepend_cond_mask, mask=mask, return_info=return_info, **kwargs )
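# Minimal sketch (toy sizes): how the transformer above uses a partial rotary embedding, rotating
# only the first rot_dim channels of each attention head and passing the rest through unchanged.
# The sizes below (32 rotary dims, 64-dim heads, 10 positions) are illustrative assumptions.
def _demo_partial_rotary():
    rope = RotaryEmbedding(32)
    freqs, _ = rope.forward_from_seq_len(10, device='cpu', dtype=torch.float32)
    q = torch.randn(1, 8, 10, 64)                  # (batch, heads, seq, dim_head)
    q_rot = apply_rotary_pos_emb(q, freqs)         # channels [:32] rotated, [32:] untouched
    return q_rot.shape                             # torch.Size([1, 8, 10, 64])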
import torch
import torch.nn as nn
from torch import Tensor, einsum
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, TypeVar, Union
from einops import rearrange
import math
import comfy.ops

class LearnedPositionalEmbedding(nn.Module):
    """Used for continuous time"""

    def __init__(self, dim: int):
        super().__init__()
        assert (dim % 2) == 0
        half_dim = dim // 2
        self.weights = nn.Parameter(torch.empty(half_dim))

    def forward(self, x: Tensor) -> Tensor:
        x = rearrange(x, "b -> b 1")
        freqs = x * rearrange(self.weights, "d -> 1 d") * 2 * math.pi
        fouriered = torch.cat((freqs.sin(), freqs.cos()), dim=-1)
        fouriered = torch.cat((x, fouriered), dim=-1)
        return fouriered

def TimePositionalEmbedding(dim: int, out_features: int) -> nn.Module:
    return nn.Sequential(
        LearnedPositionalEmbedding(dim),
        comfy.ops.manual_cast.Linear(in_features=dim + 1, out_features=out_features),
    )

class NumberEmbedder(nn.Module):
    def __init__(
        self,
        features: int,
        dim: int = 256,
    ):
        super().__init__()
        self.features = features
        self.embedding = TimePositionalEmbedding(dim=dim, out_features=features)

    def forward(self, x: Union[List[float], Tensor]) -> Tensor:
        if not torch.is_tensor(x):
            device = next(self.embedding.parameters()).device
            x = torch.tensor(x, device=device)
        assert isinstance(x, Tensor)
        shape = x.shape
        x = rearrange(x, "... -> (...)")
        embedding = self.embedding(x)
        x = embedding.view(*shape, self.features)
        return x

class Conditioner(nn.Module):
    def __init__(
        self,
        dim: int,
        output_dim: int,
        project_out: bool = False
    ):
        super().__init__()
        self.dim = dim
        self.output_dim = output_dim
        self.proj_out = nn.Linear(dim, output_dim) if (dim != output_dim or project_out) else nn.Identity()

    def forward(self, x):
        raise NotImplementedError()

class NumberConditioner(Conditioner):
    '''
    Conditioner that takes a list of floats, normalizes them for a given range, and returns a list of embeddings
    '''
    def __init__(self,
                 output_dim: int,
                 min_val: float=0,
                 max_val: float=1
                 ):
        super().__init__(output_dim, output_dim)
        self.min_val = min_val
        self.max_val = max_val
        self.embedder = NumberEmbedder(features=output_dim)

    def forward(self, floats, device=None):
        floats = [float(x) for x in floats]

        if device is None:
            device = next(self.embedder.parameters()).device

        floats = torch.tensor(floats).to(device)
        floats = floats.clamp(self.min_val, self.max_val)

        normalized_floats = (floats - self.min_val) / (self.max_val - self.min_val)

        embedder_dtype = next(self.embedder.parameters()).dtype
        normalized_floats = normalized_floats.to(embedder_dtype)

        float_embeds = self.embedder(normalized_floats).unsqueeze(1)

        return [float_embeds, torch.ones(float_embeds.shape[0], 1).to(device)]
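# Usage sketch (values and dimensions are illustrative): NumberConditioner clamps and normalizes raw
# floats into [0, 1], embeds them with NumberEmbedder, and returns [embeddings, mask] in the shape
# the rest of the conditioning stack expects; weights created via comfy.ops are left uninitialized
# here, so only the shapes are meaningful in this standalone sketch.
def _demo_number_conditioner():
    cond = NumberConditioner(output_dim=768, min_val=0.0, max_val=512.0)
    embeds, mask = cond([0.0, 47.5])       # e.g. two "seconds_start" values for a batch of two
    return embeds.shape, mask.shape        # (2, 1, 768) and (2, 1)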
""" This file is part of ComfyUI. Copyright (C) 2024 Stability AI This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https: """ import torch import torch.nn as nn from comfy.ldm.modules.attention import optimized_attention class Linear(torch.nn.Linear): def reset_parameters(self): return None class Conv2d(torch.nn.Conv2d): def reset_parameters(self): return None class OptimizedAttention(nn.Module): def __init__(self, c, nhead, dropout=0.0, dtype=None, device=None, operations=None): super().__init__() self.heads = nhead self.to_q = operations.Linear(c, c, bias=True, dtype=dtype, device=device) self.to_k = operations.Linear(c, c, bias=True, dtype=dtype, device=device) self.to_v = operations.Linear(c, c, bias=True, dtype=dtype, device=device) self.out_proj = operations.Linear(c, c, bias=True, dtype=dtype, device=device) def forward(self, q, k, v): q = self.to_q(q) k = self.to_k(k) v = self.to_v(v) out = optimized_attention(q, k, v, self.heads) return self.out_proj(out) class Attention2D(nn.Module): def __init__(self, c, nhead, dropout=0.0, dtype=None, device=None, operations=None): super().__init__() self.attn = OptimizedAttention(c, nhead, dtype=dtype, device=device, operations=operations) def forward(self, x, kv, self_attn=False): orig_shape = x.shape x = x.view(x.size(0), x.size(1), -1).permute(0, 2, 1) if self_attn: kv = torch.cat([x, kv], dim=1) x = self.attn(x, kv, kv) x = x.permute(0, 2, 1).view(*orig_shape) return x def LayerNorm2d_op(operations): class LayerNorm2d(operations.LayerNorm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def forward(self, x): return super().forward(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) return LayerNorm2d class GlobalResponseNorm(nn.Module): "from https: def __init__(self, dim, dtype=None, device=None): super().__init__() self.gamma = nn.Parameter(torch.zeros(1, 1, 1, dim, dtype=dtype, device=device)) self.beta = nn.Parameter(torch.zeros(1, 1, 1, dim, dtype=dtype, device=device)) def forward(self, x): Gx = torch.norm(x, p=2, dim=(1, 2), keepdim=True) Nx = Gx / (Gx.mean(dim=-1, keepdim=True) + 1e-6) return self.gamma.to(device=x.device, dtype=x.dtype) * (x * Nx) + self.beta.to(device=x.device, dtype=x.dtype) + x class ResBlock(nn.Module): def __init__(self, c, c_skip=0, kernel_size=3, dropout=0.0, dtype=None, device=None, operations=None): super().__init__() self.depthwise = operations.Conv2d(c, c, kernel_size=kernel_size, padding=kernel_size self.norm = LayerNorm2d_op(operations)(c, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) self.channelwise = nn.Sequential( operations.Linear(c + c_skip, c * 4, dtype=dtype, device=device), nn.GELU(), GlobalResponseNorm(c * 4, dtype=dtype, device=device), nn.Dropout(dropout), operations.Linear(c * 4, c, dtype=dtype, device=device) ) def forward(self, x, x_skip=None): x_res = x x = self.norm(self.depthwise(x)) if x_skip is not None: x = torch.cat([x, x_skip], dim=1) x = self.channelwise(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) return x + x_res class 
AttnBlock(nn.Module): def __init__(self, c, c_cond, nhead, self_attn=True, dropout=0.0, dtype=None, device=None, operations=None): super().__init__() self.self_attn = self_attn self.norm = LayerNorm2d_op(operations)(c, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) self.attention = Attention2D(c, nhead, dropout, dtype=dtype, device=device, operations=operations) self.kv_mapper = nn.Sequential( nn.SiLU(), operations.Linear(c_cond, c, dtype=dtype, device=device) ) def forward(self, x, kv): kv = self.kv_mapper(kv) x = x + self.attention(self.norm(x), kv, self_attn=self.self_attn) return x class FeedForwardBlock(nn.Module): def __init__(self, c, dropout=0.0, dtype=None, device=None, operations=None): super().__init__() self.norm = LayerNorm2d_op(operations)(c, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) self.channelwise = nn.Sequential( operations.Linear(c, c * 4, dtype=dtype, device=device), nn.GELU(), GlobalResponseNorm(c * 4, dtype=dtype, device=device), nn.Dropout(dropout), operations.Linear(c * 4, c, dtype=dtype, device=device) ) def forward(self, x): x = x + self.channelwise(self.norm(x).permute(0, 2, 3, 1)).permute(0, 3, 1, 2) return x class TimestepBlock(nn.Module): def __init__(self, c, c_timestep, conds=['sca'], dtype=None, device=None, operations=None): super().__init__() self.mapper = operations.Linear(c_timestep, c * 2, dtype=dtype, device=device) self.conds = conds for cname in conds: setattr(self, f"mapper_{cname}", operations.Linear(c_timestep, c * 2, dtype=dtype, device=device)) def forward(self, x, t): t = t.chunk(len(self.conds) + 1, dim=1) a, b = self.mapper(t[0])[:, :, None, None].chunk(2, dim=1) for i, c in enumerate(self.conds): ac, bc = getattr(self, f"mapper_{c}")(t[i + 1])[:, :, None, None].chunk(2, dim=1) a, b = a + ac, b + bc return x * (1 + a) + b
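# Minimal sketch (toy sizes; operations=nn mirrors the plain-PyTorch default used elsewhere in this
# codebase): TimestepBlock maps the timestep embedding to a scale/shift pair for the timestep plus
# one pair per extra condition name in `conds`, sums them, and applies x * (1 + a) + b channel-wise.
def _demo_timestep_block():
    block = TimestepBlock(c=64, c_timestep=32, conds=['sca'], operations=nn)
    x = torch.randn(2, 64, 8, 8)
    t = torch.randn(2, 32 * 2)     # timestep embedding concatenated with the 'sca' embedding
    return block(x, t).shape       # torch.Size([2, 64, 8, 8])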
""" This file is part of ComfyUI. Copyright (C) 2024 Stability AI This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https: """ import torch import torchvision from torch import nn from .common import LayerNorm2d_op class CNetResBlock(nn.Module): def __init__(self, c, dtype=None, device=None, operations=None): super().__init__() self.blocks = nn.Sequential( LayerNorm2d_op(operations)(c, dtype=dtype, device=device), nn.GELU(), operations.Conv2d(c, c, kernel_size=3, padding=1), LayerNorm2d_op(operations)(c, dtype=dtype, device=device), nn.GELU(), operations.Conv2d(c, c, kernel_size=3, padding=1), ) def forward(self, x): return x + self.blocks(x) class ControlNet(nn.Module): def __init__(self, c_in=3, c_proj=2048, proj_blocks=None, bottleneck_mode=None, dtype=None, device=None, operations=nn): super().__init__() if bottleneck_mode is None: bottleneck_mode = 'effnet' self.proj_blocks = proj_blocks if bottleneck_mode == 'effnet': embd_channels = 1280 self.backbone = torchvision.models.efficientnet_v2_s().features.eval() if c_in != 3: in_weights = self.backbone[0][0].weight.data self.backbone[0][0] = operations.Conv2d(c_in, 24, kernel_size=3, stride=2, bias=False, dtype=dtype, device=device) if c_in > 3: self.backbone[0][0].weight.data[:, :3] = in_weights[:, :3].clone() else: self.backbone[0][0].weight.data = in_weights[:, :c_in].clone() elif bottleneck_mode == 'simple': embd_channels = c_in self.backbone = nn.Sequential( operations.Conv2d(embd_channels, embd_channels * 4, kernel_size=3, padding=1, dtype=dtype, device=device), nn.LeakyReLU(0.2, inplace=True), operations.Conv2d(embd_channels * 4, embd_channels, kernel_size=3, padding=1, dtype=dtype, device=device), ) elif bottleneck_mode == 'large': self.backbone = nn.Sequential( operations.Conv2d(c_in, 4096 * 4, kernel_size=1, dtype=dtype, device=device), nn.LeakyReLU(0.2, inplace=True), operations.Conv2d(4096 * 4, 1024, kernel_size=1, dtype=dtype, device=device), *[CNetResBlock(1024, dtype=dtype, device=device, operations=operations) for _ in range(8)], operations.Conv2d(1024, 1280, kernel_size=1, dtype=dtype, device=device), ) embd_channels = 1280 else: raise ValueError(f'Unknown bottleneck mode: {bottleneck_mode}') self.projections = nn.ModuleList() for _ in range(len(proj_blocks)): self.projections.append(nn.Sequential( operations.Conv2d(embd_channels, embd_channels, kernel_size=1, bias=False, dtype=dtype, device=device), nn.LeakyReLU(0.2, inplace=True), operations.Conv2d(embd_channels, c_proj, kernel_size=1, bias=False, dtype=dtype, device=device), )) self.xl = False self.input_channels = c_in self.unshuffle_amount = 8 def forward(self, x): x = self.backbone(x) proj_outputs = [None for _ in range(max(self.proj_blocks) + 1)] for i, idx in enumerate(self.proj_blocks): proj_outputs[idx] = self.projections[i](x) return proj_outputs
""" This file is part of ComfyUI. Copyright (C) 2024 Stability AI This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https: """ import torch from torch import nn from torch.autograd import Function class vector_quantize(Function): @staticmethod def forward(ctx, x, codebook): with torch.no_grad(): codebook_sqr = torch.sum(codebook ** 2, dim=1) x_sqr = torch.sum(x ** 2, dim=1, keepdim=True) dist = torch.addmm(codebook_sqr + x_sqr, x, codebook.t(), alpha=-2.0, beta=1.0) _, indices = dist.min(dim=1) ctx.save_for_backward(indices, codebook) ctx.mark_non_differentiable(indices) nn = torch.index_select(codebook, 0, indices) return nn, indices @staticmethod def backward(ctx, grad_output, grad_indices): grad_inputs, grad_codebook = None, None if ctx.needs_input_grad[0]: grad_inputs = grad_output.clone() if ctx.needs_input_grad[1]: indices, codebook = ctx.saved_tensors grad_codebook = torch.zeros_like(codebook) grad_codebook.index_add_(0, indices, grad_output) return (grad_inputs, grad_codebook) class VectorQuantize(nn.Module): def __init__(self, embedding_size, k, ema_decay=0.99, ema_loss=False): """ Takes an input of variable size (as long as the last dimension matches the embedding size). Returns one tensor containing the nearest neigbour embeddings to each of the inputs, with the same size as the input, vq and commitment components for the loss as a touple in the second output and the indices of the quantized vectors in the third: quantized, (vq_loss, commit_loss), indices """ super(VectorQuantize, self).__init__() self.codebook = nn.Embedding(k, embedding_size) self.codebook.weight.data.uniform_(-1./k, 1./k) self.vq = vector_quantize.apply self.ema_decay = ema_decay self.ema_loss = ema_loss if ema_loss: self.register_buffer('ema_element_count', torch.ones(k)) self.register_buffer('ema_weight_sum', torch.zeros_like(self.codebook.weight)) def _laplace_smoothing(self, x, epsilon): n = torch.sum(x) return ((x + epsilon) / (n + x.size(0) * epsilon) * n) def _updateEMA(self, z_e_x, indices): mask = nn.functional.one_hot(indices, self.ema_element_count.size(0)).float() elem_count = mask.sum(dim=0) weight_sum = torch.mm(mask.t(), z_e_x) self.ema_element_count = (self.ema_decay * self.ema_element_count) + ((1-self.ema_decay) * elem_count) self.ema_element_count = self._laplace_smoothing(self.ema_element_count, 1e-5) self.ema_weight_sum = (self.ema_decay * self.ema_weight_sum) + ((1-self.ema_decay) * weight_sum) self.codebook.weight.data = self.ema_weight_sum / self.ema_element_count.unsqueeze(-1) def idx2vq(self, idx, dim=-1): q_idx = self.codebook(idx) if dim != -1: q_idx = q_idx.movedim(-1, dim) return q_idx def forward(self, x, get_losses=True, dim=-1): if dim != -1: x = x.movedim(dim, -1) z_e_x = x.contiguous().view(-1, x.size(-1)) if len(x.shape) > 2 else x z_q_x, indices = self.vq(z_e_x, self.codebook.weight.detach()) vq_loss, commit_loss = None, None if self.ema_loss and self.training: self._updateEMA(z_e_x.detach(), indices.detach()) z_q_x_grd = 
torch.index_select(self.codebook.weight, dim=0, index=indices) if get_losses: vq_loss = (z_q_x_grd - z_e_x.detach()).pow(2).mean() commit_loss = (z_e_x - z_q_x_grd.detach()).pow(2).mean() z_q_x = z_q_x.view(x.shape) if dim != -1: z_q_x = z_q_x.movedim(-1, dim) return z_q_x, (vq_loss, commit_loss), indices.view(x.shape[:-1]) class ResBlock(nn.Module): def __init__(self, c, c_hidden): super().__init__() self.norm1 = nn.LayerNorm(c, elementwise_affine=False, eps=1e-6) self.depthwise = nn.Sequential( nn.ReplicationPad2d(1), nn.Conv2d(c, c, kernel_size=3, groups=c) ) self.norm2 = nn.LayerNorm(c, elementwise_affine=False, eps=1e-6) self.channelwise = nn.Sequential( nn.Linear(c, c_hidden), nn.GELU(), nn.Linear(c_hidden, c), ) self.gammas = nn.Parameter(torch.zeros(6), requires_grad=True) def _basic_init(module): if isinstance(module, nn.Linear) or isinstance(module, nn.Conv2d): torch.nn.init.xavier_uniform_(module.weight) if module.bias is not None: nn.init.constant_(module.bias, 0) self.apply(_basic_init) def _norm(self, x, norm): return norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) def forward(self, x): mods = self.gammas x_temp = self._norm(x, self.norm1) * (1 + mods[0]) + mods[1] try: x = x + self.depthwise(x_temp) * mods[2] except: x_temp = self.depthwise[0](x_temp.float()).to(x.dtype) x = x + self.depthwise[1](x_temp) * mods[2] x_temp = self._norm(x, self.norm2) * (1 + mods[3]) + mods[4] x = x + self.channelwise(x_temp.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) * mods[5] return x class StageA(nn.Module): def __init__(self, levels=2, bottleneck_blocks=12, c_hidden=384, c_latent=4, codebook_size=8192): super().__init__() self.c_latent = c_latent c_levels = [c_hidden self.in_block = nn.Sequential( nn.PixelUnshuffle(2), nn.Conv2d(3 * 4, c_levels[0], kernel_size=1) ) down_blocks = [] for i in range(levels): if i > 0: down_blocks.append(nn.Conv2d(c_levels[i - 1], c_levels[i], kernel_size=4, stride=2, padding=1)) block = ResBlock(c_levels[i], c_levels[i] * 4) down_blocks.append(block) down_blocks.append(nn.Sequential( nn.Conv2d(c_levels[-1], c_latent, kernel_size=1, bias=False), nn.BatchNorm2d(c_latent), )) self.down_blocks = nn.Sequential(*down_blocks) self.down_blocks[0] self.codebook_size = codebook_size self.vquantizer = VectorQuantize(c_latent, k=codebook_size) up_blocks = [nn.Sequential( nn.Conv2d(c_latent, c_levels[-1], kernel_size=1) )] for i in range(levels): for j in range(bottleneck_blocks if i == 0 else 1): block = ResBlock(c_levels[levels - 1 - i], c_levels[levels - 1 - i] * 4) up_blocks.append(block) if i < levels - 1: up_blocks.append( nn.ConvTranspose2d(c_levels[levels - 1 - i], c_levels[levels - 2 - i], kernel_size=4, stride=2, padding=1)) self.up_blocks = nn.Sequential(*up_blocks) self.out_block = nn.Sequential( nn.Conv2d(c_levels[0], 3 * 4, kernel_size=1), nn.PixelShuffle(2), ) def encode(self, x, quantize=False): x = self.in_block(x) x = self.down_blocks(x) if quantize: qe, (vq_loss, commit_loss), indices = self.vquantizer.forward(x, dim=1) return qe, x, indices, vq_loss + commit_loss * 0.25 else: return x def decode(self, x): x = self.up_blocks(x) x = self.out_block(x) return x def forward(self, x, quantize=False): qe, x, _, vq_loss = self.encode(x, quantize) x = self.decode(qe) return x, vq_loss class Discriminator(nn.Module): def __init__(self, c_in=3, c_cond=0, c_hidden=512, depth=6): super().__init__() d = max(depth - 3, 3) layers = [ nn.utils.spectral_norm(nn.Conv2d(c_in, c_hidden nn.LeakyReLU(0.2), ] for i in range(depth - 1): c_in = c_hidden c_out = c_hidden 
layers.append(nn.utils.spectral_norm(nn.Conv2d(c_in, c_out, kernel_size=3, stride=2, padding=1))) layers.append(nn.InstanceNorm2d(c_out)) layers.append(nn.LeakyReLU(0.2)) self.encoder = nn.Sequential(*layers) self.shuffle = nn.Conv2d((c_hidden + c_cond) if c_cond > 0 else c_hidden, 1, kernel_size=1) self.logits = nn.Sigmoid() def forward(self, x, cond=None): x = self.encoder(x) if cond is not None: cond = cond.view(cond.size(0), cond.size(1), 1, 1, ).expand(-1, -1, x.size(-2), x.size(-1)) x = torch.cat([x, cond], dim=1) x = self.shuffle(x) x = self.logits(x) return x
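# Minimal sketch (toy sizes): VectorQuantize snaps each channel vector to its nearest codebook entry
# and returns the quantized latents, the (vq, commit) loss pair, and the codebook indices; dim=1
# tells it which axis holds the embedding, matching how StageA calls it above.
def _demo_vector_quantize():
    vq = VectorQuantize(embedding_size=4, k=8192)
    z = torch.randn(2, 4, 16, 16)                        # (B, C, H, W) latents, channels-first
    z_q, (vq_loss, commit_loss), idx = vq(z, dim=1)
    return z_q.shape, idx.shape                          # (2, 4, 16, 16) and (2, 16, 16)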
""" This file is part of ComfyUI. Copyright (C) 2024 Stability AI This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https: """ import math import torch from torch import nn from .common import AttnBlock, LayerNorm2d_op, ResBlock, FeedForwardBlock, TimestepBlock class StageB(nn.Module): def __init__(self, c_in=4, c_out=4, c_r=64, patch_size=2, c_cond=1280, c_hidden=[320, 640, 1280, 1280], nhead=[-1, -1, 20, 20], blocks=[[2, 6, 28, 6], [6, 28, 6, 2]], block_repeat=[[1, 1, 1, 1], [3, 3, 2, 2]], level_config=['CT', 'CT', 'CTA', 'CTA'], c_clip=1280, c_clip_seq=4, c_effnet=16, c_pixels=3, kernel_size=3, dropout=[0, 0, 0.0, 0.0], self_attn=True, t_conds=['sca'], stable_cascade_stage=None, dtype=None, device=None, operations=None): super().__init__() self.dtype = dtype self.c_r = c_r self.t_conds = t_conds self.c_clip_seq = c_clip_seq if not isinstance(dropout, list): dropout = [dropout] * len(c_hidden) if not isinstance(self_attn, list): self_attn = [self_attn] * len(c_hidden) self.effnet_mapper = nn.Sequential( operations.Conv2d(c_effnet, c_hidden[0] * 4, kernel_size=1, dtype=dtype, device=device), nn.GELU(), operations.Conv2d(c_hidden[0] * 4, c_hidden[0], kernel_size=1, dtype=dtype, device=device), LayerNorm2d_op(operations)(c_hidden[0], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) ) self.pixels_mapper = nn.Sequential( operations.Conv2d(c_pixels, c_hidden[0] * 4, kernel_size=1, dtype=dtype, device=device), nn.GELU(), operations.Conv2d(c_hidden[0] * 4, c_hidden[0], kernel_size=1, dtype=dtype, device=device), LayerNorm2d_op(operations)(c_hidden[0], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) ) self.clip_mapper = operations.Linear(c_clip, c_cond * c_clip_seq, dtype=dtype, device=device) self.clip_norm = operations.LayerNorm(c_cond, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) self.embedding = nn.Sequential( nn.PixelUnshuffle(patch_size), operations.Conv2d(c_in * (patch_size ** 2), c_hidden[0], kernel_size=1, dtype=dtype, device=device), LayerNorm2d_op(operations)(c_hidden[0], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) ) def get_block(block_type, c_hidden, nhead, c_skip=0, dropout=0, self_attn=True): if block_type == 'C': return ResBlock(c_hidden, c_skip, kernel_size=kernel_size, dropout=dropout, dtype=dtype, device=device, operations=operations) elif block_type == 'A': return AttnBlock(c_hidden, c_cond, nhead, self_attn=self_attn, dropout=dropout, dtype=dtype, device=device, operations=operations) elif block_type == 'F': return FeedForwardBlock(c_hidden, dropout=dropout, dtype=dtype, device=device, operations=operations) elif block_type == 'T': return TimestepBlock(c_hidden, c_r, conds=t_conds, dtype=dtype, device=device, operations=operations) else: raise Exception(f'Block type {block_type} not supported') self.down_blocks = nn.ModuleList() self.down_downscalers = nn.ModuleList() self.down_repeat_mappers = nn.ModuleList() for i in range(len(c_hidden)): if i > 0: 
self.down_downscalers.append(nn.Sequential( LayerNorm2d_op(operations)(c_hidden[i - 1], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device), operations.Conv2d(c_hidden[i - 1], c_hidden[i], kernel_size=2, stride=2, dtype=dtype, device=device), )) else: self.down_downscalers.append(nn.Identity()) down_block = nn.ModuleList() for _ in range(blocks[0][i]): for block_type in level_config[i]: block = get_block(block_type, c_hidden[i], nhead[i], dropout=dropout[i], self_attn=self_attn[i]) down_block.append(block) self.down_blocks.append(down_block) if block_repeat is not None: block_repeat_mappers = nn.ModuleList() for _ in range(block_repeat[0][i] - 1): block_repeat_mappers.append(operations.Conv2d(c_hidden[i], c_hidden[i], kernel_size=1, dtype=dtype, device=device)) self.down_repeat_mappers.append(block_repeat_mappers) self.up_blocks = nn.ModuleList() self.up_upscalers = nn.ModuleList() self.up_repeat_mappers = nn.ModuleList() for i in reversed(range(len(c_hidden))): if i > 0: self.up_upscalers.append(nn.Sequential( LayerNorm2d_op(operations)(c_hidden[i], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device), operations.ConvTranspose2d(c_hidden[i], c_hidden[i - 1], kernel_size=2, stride=2, dtype=dtype, device=device), )) else: self.up_upscalers.append(nn.Identity()) up_block = nn.ModuleList() for j in range(blocks[1][::-1][i]): for k, block_type in enumerate(level_config[i]): c_skip = c_hidden[i] if i < len(c_hidden) - 1 and j == k == 0 else 0 block = get_block(block_type, c_hidden[i], nhead[i], c_skip=c_skip, dropout=dropout[i], self_attn=self_attn[i]) up_block.append(block) self.up_blocks.append(up_block) if block_repeat is not None: block_repeat_mappers = nn.ModuleList() for _ in range(block_repeat[1][::-1][i] - 1): block_repeat_mappers.append(operations.Conv2d(c_hidden[i], c_hidden[i], kernel_size=1, dtype=dtype, device=device)) self.up_repeat_mappers.append(block_repeat_mappers) self.clf = nn.Sequential( LayerNorm2d_op(operations)(c_hidden[0], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device), operations.Conv2d(c_hidden[0], c_out * (patch_size ** 2), kernel_size=1, dtype=dtype, device=device), nn.PixelShuffle(patch_size), ) def gen_r_embedding(self, r, max_positions=10000): r = r * max_positions half_dim = self.c_r emb = math.log(max_positions) / (half_dim - 1) emb = torch.arange(half_dim, device=r.device).float().mul(-emb).exp() emb = r[:, None] * emb[None, :] emb = torch.cat([emb.sin(), emb.cos()], dim=1) if self.c_r % 2 == 1: emb = nn.functional.pad(emb, (0, 1), mode='constant') return emb def gen_c_embeddings(self, clip): if len(clip.shape) == 2: clip = clip.unsqueeze(1) clip = self.clip_mapper(clip).view(clip.size(0), clip.size(1) * self.c_clip_seq, -1) clip = self.clip_norm(clip) return clip def _down_encode(self, x, r_embed, clip): level_outputs = [] block_group = zip(self.down_blocks, self.down_downscalers, self.down_repeat_mappers) for down_block, downscaler, repmap in block_group: x = downscaler(x) for i in range(len(repmap) + 1): for block in down_block: if isinstance(block, ResBlock) or ( hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, ResBlock)): x = block(x) elif isinstance(block, AttnBlock) or ( hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, AttnBlock)): x = block(x, clip) elif isinstance(block, TimestepBlock) or ( hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, TimestepBlock)): x = block(x, r_embed) else: x = block(x) if i < len(repmap): 
x = repmap[i](x) level_outputs.insert(0, x) return level_outputs def _up_decode(self, level_outputs, r_embed, clip): x = level_outputs[0] block_group = zip(self.up_blocks, self.up_upscalers, self.up_repeat_mappers) for i, (up_block, upscaler, repmap) in enumerate(block_group): for j in range(len(repmap) + 1): for k, block in enumerate(up_block): if isinstance(block, ResBlock) or ( hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, ResBlock)): skip = level_outputs[i] if k == 0 and i > 0 else None if skip is not None and (x.size(-1) != skip.size(-1) or x.size(-2) != skip.size(-2)): x = torch.nn.functional.interpolate(x, skip.shape[-2:], mode='bilinear', align_corners=True) x = block(x, skip) elif isinstance(block, AttnBlock) or ( hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, AttnBlock)): x = block(x, clip) elif isinstance(block, TimestepBlock) or ( hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, TimestepBlock)): x = block(x, r_embed) else: x = block(x) if j < len(repmap): x = repmap[j](x) x = upscaler(x) return x def forward(self, x, r, effnet, clip, pixels=None, **kwargs): if pixels is None: pixels = x.new_zeros(x.size(0), 3, 8, 8) r_embed = self.gen_r_embedding(r).to(dtype=x.dtype) for c in self.t_conds: t_cond = kwargs.get(c, torch.zeros_like(r)) r_embed = torch.cat([r_embed, self.gen_r_embedding(t_cond).to(dtype=x.dtype)], dim=1) clip = self.gen_c_embeddings(clip) x = self.embedding(x) x = x + self.effnet_mapper( nn.functional.interpolate(effnet, size=x.shape[-2:], mode='bilinear', align_corners=True)) x = x + nn.functional.interpolate(self.pixels_mapper(pixels), size=x.shape[-2:], mode='bilinear', align_corners=True) level_outputs = self._down_encode(x, r_embed, clip) x = self._up_decode(level_outputs, r_embed, clip) return self.clf(x) def update_weights_ema(self, src_model, beta=0.999): for self_params, src_params in zip(self.parameters(), src_model.parameters()): self_params.data = self_params.data * beta + src_params.data.clone().to(self_params.device) * (1 - beta) for self_buffers, src_buffers in zip(self.buffers(), src_model.buffers()): self_buffers.data = self_buffers.data * beta + src_buffers.data.clone().to(self_buffers.device) * (1 - beta)
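# --- Standalone sketch (not part of the original file) of the sinusoidal embedding that
# StageB.gen_r_embedding builds for the noise level r and each extra condition in t_conds,
# assuming the standard formulation with half_dim = c_r // 2 (half sin, half cos channels).
import math
import torch

def sinusoidal_r_embedding(r: torch.Tensor, c_r: int = 64, max_positions: int = 10000) -> torch.Tensor:
    r = r * max_positions
    half_dim = c_r // 2
    freqs = torch.exp(-math.log(max_positions) / (half_dim - 1) * torch.arange(half_dim, device=r.device).float())
    angles = r[:, None] * freqs[None, :]
    emb = torch.cat([angles.sin(), angles.cos()], dim=1)
    if c_r % 2 == 1:  # zero-pad odd embedding widths
        emb = torch.nn.functional.pad(emb, (0, 1))
    return emb

print(sinusoidal_r_embedding(torch.rand(4)).shape)   # torch.Size([4, 64])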
""" This file is part of ComfyUI. Copyright (C) 2024 Stability AI This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https: """ import torch from torch import nn import math from .common import AttnBlock, LayerNorm2d_op, ResBlock, FeedForwardBlock, TimestepBlock class UpDownBlock2d(nn.Module): def __init__(self, c_in, c_out, mode, enabled=True, dtype=None, device=None, operations=None): super().__init__() assert mode in ['up', 'down'] interpolation = nn.Upsample(scale_factor=2 if mode == 'up' else 0.5, mode='bilinear', align_corners=True) if enabled else nn.Identity() mapping = operations.Conv2d(c_in, c_out, kernel_size=1, dtype=dtype, device=device) self.blocks = nn.ModuleList([interpolation, mapping] if mode == 'up' else [mapping, interpolation]) def forward(self, x): for block in self.blocks: x = block(x) return x class StageC(nn.Module): def __init__(self, c_in=16, c_out=16, c_r=64, patch_size=1, c_cond=2048, c_hidden=[2048, 2048], nhead=[32, 32], blocks=[[8, 24], [24, 8]], block_repeat=[[1, 1], [1, 1]], level_config=['CTA', 'CTA'], c_clip_text=1280, c_clip_text_pooled=1280, c_clip_img=768, c_clip_seq=4, kernel_size=3, dropout=[0.0, 0.0], self_attn=True, t_conds=['sca', 'crp'], switch_level=[False], stable_cascade_stage=None, dtype=None, device=None, operations=None): super().__init__() self.dtype = dtype self.c_r = c_r self.t_conds = t_conds self.c_clip_seq = c_clip_seq if not isinstance(dropout, list): dropout = [dropout] * len(c_hidden) if not isinstance(self_attn, list): self_attn = [self_attn] * len(c_hidden) self.clip_txt_mapper = operations.Linear(c_clip_text, c_cond, dtype=dtype, device=device) self.clip_txt_pooled_mapper = operations.Linear(c_clip_text_pooled, c_cond * c_clip_seq, dtype=dtype, device=device) self.clip_img_mapper = operations.Linear(c_clip_img, c_cond * c_clip_seq, dtype=dtype, device=device) self.clip_norm = operations.LayerNorm(c_cond, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) self.embedding = nn.Sequential( nn.PixelUnshuffle(patch_size), operations.Conv2d(c_in * (patch_size ** 2), c_hidden[0], kernel_size=1, dtype=dtype, device=device), LayerNorm2d_op(operations)(c_hidden[0], elementwise_affine=False, eps=1e-6) ) def get_block(block_type, c_hidden, nhead, c_skip=0, dropout=0, self_attn=True): if block_type == 'C': return ResBlock(c_hidden, c_skip, kernel_size=kernel_size, dropout=dropout, dtype=dtype, device=device, operations=operations) elif block_type == 'A': return AttnBlock(c_hidden, c_cond, nhead, self_attn=self_attn, dropout=dropout, dtype=dtype, device=device, operations=operations) elif block_type == 'F': return FeedForwardBlock(c_hidden, dropout=dropout, dtype=dtype, device=device, operations=operations) elif block_type == 'T': return TimestepBlock(c_hidden, c_r, conds=t_conds, dtype=dtype, device=device, operations=operations) else: raise Exception(f'Block type {block_type} not supported') self.down_blocks = nn.ModuleList() self.down_downscalers = nn.ModuleList() self.down_repeat_mappers = nn.ModuleList() for 
i in range(len(c_hidden)): if i > 0: self.down_downscalers.append(nn.Sequential( LayerNorm2d_op(operations)(c_hidden[i - 1], elementwise_affine=False, eps=1e-6), UpDownBlock2d(c_hidden[i - 1], c_hidden[i], mode='down', enabled=switch_level[i - 1], dtype=dtype, device=device, operations=operations) )) else: self.down_downscalers.append(nn.Identity()) down_block = nn.ModuleList() for _ in range(blocks[0][i]): for block_type in level_config[i]: block = get_block(block_type, c_hidden[i], nhead[i], dropout=dropout[i], self_attn=self_attn[i]) down_block.append(block) self.down_blocks.append(down_block) if block_repeat is not None: block_repeat_mappers = nn.ModuleList() for _ in range(block_repeat[0][i] - 1): block_repeat_mappers.append(operations.Conv2d(c_hidden[i], c_hidden[i], kernel_size=1, dtype=dtype, device=device)) self.down_repeat_mappers.append(block_repeat_mappers) self.up_blocks = nn.ModuleList() self.up_upscalers = nn.ModuleList() self.up_repeat_mappers = nn.ModuleList() for i in reversed(range(len(c_hidden))): if i > 0: self.up_upscalers.append(nn.Sequential( LayerNorm2d_op(operations)(c_hidden[i], elementwise_affine=False, eps=1e-6), UpDownBlock2d(c_hidden[i], c_hidden[i - 1], mode='up', enabled=switch_level[i - 1], dtype=dtype, device=device, operations=operations) )) else: self.up_upscalers.append(nn.Identity()) up_block = nn.ModuleList() for j in range(blocks[1][::-1][i]): for k, block_type in enumerate(level_config[i]): c_skip = c_hidden[i] if i < len(c_hidden) - 1 and j == k == 0 else 0 block = get_block(block_type, c_hidden[i], nhead[i], c_skip=c_skip, dropout=dropout[i], self_attn=self_attn[i]) up_block.append(block) self.up_blocks.append(up_block) if block_repeat is not None: block_repeat_mappers = nn.ModuleList() for _ in range(block_repeat[1][::-1][i] - 1): block_repeat_mappers.append(operations.Conv2d(c_hidden[i], c_hidden[i], kernel_size=1, dtype=dtype, device=device)) self.up_repeat_mappers.append(block_repeat_mappers) self.clf = nn.Sequential( LayerNorm2d_op(operations)(c_hidden[0], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device), operations.Conv2d(c_hidden[0], c_out * (patch_size ** 2), kernel_size=1, dtype=dtype, device=device), nn.PixelShuffle(patch_size), ) def gen_r_embedding(self, r, max_positions=10000): r = r * max_positions half_dim = self.c_r emb = math.log(max_positions) / (half_dim - 1) emb = torch.arange(half_dim, device=r.device).float().mul(-emb).exp() emb = r[:, None] * emb[None, :] emb = torch.cat([emb.sin(), emb.cos()], dim=1) if self.c_r % 2 == 1: emb = nn.functional.pad(emb, (0, 1), mode='constant') return emb def gen_c_embeddings(self, clip_txt, clip_txt_pooled, clip_img): clip_txt = self.clip_txt_mapper(clip_txt) if len(clip_txt_pooled.shape) == 2: clip_txt_pooled = clip_txt_pooled.unsqueeze(1) if len(clip_img.shape) == 2: clip_img = clip_img.unsqueeze(1) clip_txt_pool = self.clip_txt_pooled_mapper(clip_txt_pooled).view(clip_txt_pooled.size(0), clip_txt_pooled.size(1) * self.c_clip_seq, -1) clip_img = self.clip_img_mapper(clip_img).view(clip_img.size(0), clip_img.size(1) * self.c_clip_seq, -1) clip = torch.cat([clip_txt, clip_txt_pool, clip_img], dim=1) clip = self.clip_norm(clip) return clip def _down_encode(self, x, r_embed, clip, cnet=None): level_outputs = [] block_group = zip(self.down_blocks, self.down_downscalers, self.down_repeat_mappers) for down_block, downscaler, repmap in block_group: x = downscaler(x) for i in range(len(repmap) + 1): for block in down_block: if isinstance(block, ResBlock) or ( hasattr(block, 
'_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, ResBlock)): if cnet is not None: next_cnet = cnet.pop() if next_cnet is not None: x = x + nn.functional.interpolate(next_cnet, size=x.shape[-2:], mode='bilinear', align_corners=True).to(x.dtype) x = block(x) elif isinstance(block, AttnBlock) or ( hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, AttnBlock)): x = block(x, clip) elif isinstance(block, TimestepBlock) or ( hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, TimestepBlock)): x = block(x, r_embed) else: x = block(x) if i < len(repmap): x = repmap[i](x) level_outputs.insert(0, x) return level_outputs def _up_decode(self, level_outputs, r_embed, clip, cnet=None): x = level_outputs[0] block_group = zip(self.up_blocks, self.up_upscalers, self.up_repeat_mappers) for i, (up_block, upscaler, repmap) in enumerate(block_group): for j in range(len(repmap) + 1): for k, block in enumerate(up_block): if isinstance(block, ResBlock) or ( hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, ResBlock)): skip = level_outputs[i] if k == 0 and i > 0 else None if skip is not None and (x.size(-1) != skip.size(-1) or x.size(-2) != skip.size(-2)): x = torch.nn.functional.interpolate(x, skip.shape[-2:], mode='bilinear', align_corners=True) if cnet is not None: next_cnet = cnet.pop() if next_cnet is not None: x = x + nn.functional.interpolate(next_cnet, size=x.shape[-2:], mode='bilinear', align_corners=True).to(x.dtype) x = block(x, skip) elif isinstance(block, AttnBlock) or ( hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, AttnBlock)): x = block(x, clip) elif isinstance(block, TimestepBlock) or ( hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, TimestepBlock)): x = block(x, r_embed) else: x = block(x) if j < len(repmap): x = repmap[j](x) x = upscaler(x) return x def forward(self, x, r, clip_text, clip_text_pooled, clip_img, control=None, **kwargs): r_embed = self.gen_r_embedding(r).to(dtype=x.dtype) for c in self.t_conds: t_cond = kwargs.get(c, torch.zeros_like(r)) r_embed = torch.cat([r_embed, self.gen_r_embedding(t_cond).to(dtype=x.dtype)], dim=1) clip = self.gen_c_embeddings(clip_text, clip_text_pooled, clip_img) if control is not None: cnet = control.get("input") else: cnet = None x = self.embedding(x) level_outputs = self._down_encode(x, r_embed, clip, cnet) x = self._up_decode(level_outputs, r_embed, clip, cnet) return self.clf(x) def update_weights_ema(self, src_model, beta=0.999): for self_params, src_params in zip(self.parameters(), src_model.parameters()): self_params.data = self_params.data * beta + src_params.data.clone().to(self_params.device) * (1 - beta) for self_buffers, src_buffers in zip(self.buffers(), src_model.buffers()): self_buffers.data = self_buffers.data * beta + src_buffers.data.clone().to(self_buffers.device) * (1 - beta)
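# --- Shape sketch (not part of the original file, hypothetical sizes) of how StageC.gen_c_embeddings
# assembles its conditioning: text tokens are mapped to c_cond, while the pooled text and image
# embeddings are each expanded into c_clip_seq pseudo-tokens, then everything is concatenated
# along the sequence dimension before the final LayerNorm.
import torch

b, c_cond, c_clip_seq = 2, 2048, 4
clip_txt = torch.randn(b, 77, c_cond)                    # text tokens after clip_txt_mapper
clip_txt_pool = torch.randn(b, 1 * c_clip_seq, c_cond)   # pooled text -> 4 pseudo-tokens
clip_img = torch.randn(b, 1 * c_clip_seq, c_cond)        # image embedding -> 4 pseudo-tokens
clip = torch.cat([clip_txt, clip_txt_pool, clip_img], dim=1)
print(clip.shape)    # torch.Size([2, 85, 2048]) -- 77 + 4 + 4 conditioning tokens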
""" This file is part of ComfyUI. Copyright (C) 2024 Stability AI This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https: """ import torch import torchvision from torch import nn class EfficientNetEncoder(nn.Module): def __init__(self, c_latent=16): super().__init__() self.backbone = torchvision.models.efficientnet_v2_s().features.eval() self.mapper = nn.Sequential( nn.Conv2d(1280, c_latent, kernel_size=1, bias=False), nn.BatchNorm2d(c_latent, affine=False), ) self.mean = nn.Parameter(torch.tensor([0.485, 0.456, 0.406])) self.std = nn.Parameter(torch.tensor([0.229, 0.224, 0.225])) def forward(self, x): x = x * 0.5 + 0.5 x = (x - self.mean.view([3,1,1])) / self.std.view([3,1,1]) o = self.mapper(self.backbone(x)) return o class Previewer(nn.Module): def __init__(self, c_in=16, c_hidden=512, c_out=3): super().__init__() self.blocks = nn.Sequential( nn.Conv2d(c_in, c_hidden, kernel_size=1), nn.GELU(), nn.BatchNorm2d(c_hidden), nn.Conv2d(c_hidden, c_hidden, kernel_size=3, padding=1), nn.GELU(), nn.BatchNorm2d(c_hidden), nn.ConvTranspose2d(c_hidden, c_hidden nn.GELU(), nn.BatchNorm2d(c_hidden nn.Conv2d(c_hidden nn.GELU(), nn.BatchNorm2d(c_hidden nn.ConvTranspose2d(c_hidden nn.GELU(), nn.BatchNorm2d(c_hidden nn.Conv2d(c_hidden nn.GELU(), nn.BatchNorm2d(c_hidden nn.ConvTranspose2d(c_hidden nn.GELU(), nn.BatchNorm2d(c_hidden nn.Conv2d(c_hidden nn.GELU(), nn.BatchNorm2d(c_hidden nn.Conv2d(c_hidden ) def forward(self, x): return (self.blocks(x) - 0.5) * 2.0 class StageC_coder(nn.Module): def __init__(self): super().__init__() self.previewer = Previewer() self.encoder = EfficientNetEncoder() def encode(self, x): return self.encoder(x) def decode(self, x): return self.previewer(x)
import torch from contextlib import contextmanager from typing import Any, Dict, List, Optional, Tuple, Union from comfy.ldm.modules.distributions.distributions import DiagonalGaussianDistribution from comfy.ldm.util import instantiate_from_config from comfy.ldm.modules.ema import LitEma import comfy.ops class DiagonalGaussianRegularizer(torch.nn.Module): def __init__(self, sample: bool = True): super().__init__() self.sample = sample def get_trainable_parameters(self) -> Any: yield from () def forward(self, z: torch.Tensor) -> Tuple[torch.Tensor, dict]: log = dict() posterior = DiagonalGaussianDistribution(z) if self.sample: z = posterior.sample() else: z = posterior.mode() kl_loss = posterior.kl() kl_loss = torch.sum(kl_loss) / kl_loss.shape[0] log["kl_loss"] = kl_loss return z, log class AbstractAutoencoder(torch.nn.Module): """ This is the base class for all autoencoders, including image autoencoders, image autoencoders with discriminators, unCLIP models, etc. Hence, it is fairly general, and specific features (e.g. discriminator training, encoding, decoding) must be implemented in subclasses. """ def __init__( self, ema_decay: Union[None, float] = None, monitor: Union[None, str] = None, input_key: str = "jpg", **kwargs, ): super().__init__() self.input_key = input_key self.use_ema = ema_decay is not None if monitor is not None: self.monitor = monitor if self.use_ema: self.model_ema = LitEma(self, decay=ema_decay) logpy.info(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.") def get_input(self, batch) -> Any: raise NotImplementedError() def on_train_batch_end(self, *args, **kwargs): if self.use_ema: self.model_ema(self) @contextmanager def ema_scope(self, context=None): if self.use_ema: self.model_ema.store(self.parameters()) self.model_ema.copy_to(self) if context is not None: logpy.info(f"{context}: Switched to EMA weights") try: yield None finally: if self.use_ema: self.model_ema.restore(self.parameters()) if context is not None: logpy.info(f"{context}: Restored training weights") def encode(self, *args, **kwargs) -> torch.Tensor: raise NotImplementedError("encode()-method of abstract base class called") def decode(self, *args, **kwargs) -> torch.Tensor: raise NotImplementedError("decode()-method of abstract base class called") def instantiate_optimizer_from_config(self, params, lr, cfg): logpy.info(f"loading >>> {cfg['target']} <<< optimizer from config") return get_obj_from_str(cfg["target"])( params, lr=lr, **cfg.get("params", dict()) ) def configure_optimizers(self) -> Any: raise NotImplementedError() class AutoencodingEngine(AbstractAutoencoder): """ Base class for all image autoencoders that we train, like VQGAN or AutoencoderKL (we also restore them explicitly as special cases for legacy reasons). Regularizations such as KL or VQ are moved to the regularizer class. 
""" def __init__( self, *args, encoder_config: Dict, decoder_config: Dict, regularizer_config: Dict, **kwargs, ): super().__init__(*args, **kwargs) self.encoder: torch.nn.Module = instantiate_from_config(encoder_config) self.decoder: torch.nn.Module = instantiate_from_config(decoder_config) self.regularization: AbstractRegularizer = instantiate_from_config( regularizer_config ) def get_last_layer(self): return self.decoder.get_last_layer() def encode( self, x: torch.Tensor, return_reg_log: bool = False, unregularized: bool = False, ) -> Union[torch.Tensor, Tuple[torch.Tensor, dict]]: z = self.encoder(x) if unregularized: return z, dict() z, reg_log = self.regularization(z) if return_reg_log: return z, reg_log return z def decode(self, z: torch.Tensor, **kwargs) -> torch.Tensor: x = self.decoder(z, **kwargs) return x def forward( self, x: torch.Tensor, **additional_decode_kwargs ) -> Tuple[torch.Tensor, torch.Tensor, dict]: z, reg_log = self.encode(x, return_reg_log=True) dec = self.decode(z, **additional_decode_kwargs) return z, dec, reg_log class AutoencodingEngineLegacy(AutoencodingEngine): def __init__(self, embed_dim: int, **kwargs): self.max_batch_size = kwargs.pop("max_batch_size", None) ddconfig = kwargs.pop("ddconfig") super().__init__( encoder_config={ "target": "comfy.ldm.modules.diffusionmodules.model.Encoder", "params": ddconfig, }, decoder_config={ "target": "comfy.ldm.modules.diffusionmodules.model.Decoder", "params": ddconfig, }, **kwargs, ) self.quant_conv = comfy.ops.disable_weight_init.Conv2d( (1 + ddconfig["double_z"]) * ddconfig["z_channels"], (1 + ddconfig["double_z"]) * embed_dim, 1, ) self.post_quant_conv = comfy.ops.disable_weight_init.Conv2d(embed_dim, ddconfig["z_channels"], 1) self.embed_dim = embed_dim def get_autoencoder_params(self) -> list: params = super().get_autoencoder_params() return params def encode( self, x: torch.Tensor, return_reg_log: bool = False ) -> Union[torch.Tensor, Tuple[torch.Tensor, dict]]: if self.max_batch_size is None: z = self.encoder(x) z = self.quant_conv(z) else: N = x.shape[0] bs = self.max_batch_size n_batches = int(math.ceil(N / bs)) z = list() for i_batch in range(n_batches): z_batch = self.encoder(x[i_batch * bs : (i_batch + 1) * bs]) z_batch = self.quant_conv(z_batch) z.append(z_batch) z = torch.cat(z, 0) z, reg_log = self.regularization(z) if return_reg_log: return z, reg_log return z def decode(self, z: torch.Tensor, **decoder_kwargs) -> torch.Tensor: if self.max_batch_size is None: dec = self.post_quant_conv(z) dec = self.decoder(dec, **decoder_kwargs) else: N = z.shape[0] bs = self.max_batch_size n_batches = int(math.ceil(N / bs)) dec = list() for i_batch in range(n_batches): dec_batch = self.post_quant_conv(z[i_batch * bs : (i_batch + 1) * bs]) dec_batch = self.decoder(dec_batch, **decoder_kwargs) dec.append(dec_batch) dec = torch.cat(dec, 0) return dec class AutoencoderKL(AutoencodingEngineLegacy): def __init__(self, **kwargs): if "lossconfig" in kwargs: kwargs["loss_config"] = kwargs.pop("lossconfig") super().__init__( regularizer_config={ "target": ( "comfy.ldm.models.autoencoder.DiagonalGaussianRegularizer" ) }, **kwargs, )
import math import torch import torch.nn.functional as F from torch import nn, einsum from einops import rearrange, repeat from typing import Optional import logging from .diffusionmodules.util import AlphaBlender, timestep_embedding from .sub_quadratic_attention import efficient_dot_product_attention from comfy import model_management if model_management.xformers_enabled(): import xformers import xformers.ops from comfy.cli_args import args import comfy.ops ops = comfy.ops.disable_weight_init FORCE_UPCAST_ATTENTION_DTYPE = model_management.force_upcast_attention_dtype() def get_attn_precision(attn_precision): if args.dont_upcast_attention: return None if FORCE_UPCAST_ATTENTION_DTYPE is not None: return FORCE_UPCAST_ATTENTION_DTYPE return attn_precision def exists(val): return val is not None def uniq(arr): return{el: True for el in arr}.keys() def default(val, d): if exists(val): return val return d def max_neg_value(t): return -torch.finfo(t.dtype).max def init_(tensor): dim = tensor.shape[-1] std = 1 / math.sqrt(dim) tensor.uniform_(-std, std) return tensor class GEGLU(nn.Module): def __init__(self, dim_in, dim_out, dtype=None, device=None, operations=ops): super().__init__() self.proj = operations.Linear(dim_in, dim_out * 2, dtype=dtype, device=device) def forward(self, x): x, gate = self.proj(x).chunk(2, dim=-1) return x * F.gelu(gate) class FeedForward(nn.Module): def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0., dtype=None, device=None, operations=ops): super().__init__() inner_dim = int(dim * mult) dim_out = default(dim_out, dim) project_in = nn.Sequential( operations.Linear(dim, inner_dim, dtype=dtype, device=device), nn.GELU() ) if not glu else GEGLU(dim, inner_dim, dtype=dtype, device=device, operations=operations) self.net = nn.Sequential( project_in, nn.Dropout(dropout), operations.Linear(inner_dim, dim_out, dtype=dtype, device=device) ) def forward(self, x): return self.net(x) def Normalize(in_channels, dtype=None, device=None): return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True, dtype=dtype, device=device) def attention_basic(q, k, v, heads, mask=None, attn_precision=None, skip_reshape=False): attn_precision = get_attn_precision(attn_precision) if skip_reshape: b, _, _, dim_head = q.shape else: b, _, dim_head = q.shape dim_head scale = dim_head ** -0.5 h = heads if skip_reshape: q, k, v = map( lambda t: t.reshape(b * heads, -1, dim_head), (q, k, v), ) else: q, k, v = map( lambda t: t.unsqueeze(3) .reshape(b, -1, heads, dim_head) .permute(0, 2, 1, 3) .reshape(b * heads, -1, dim_head) .contiguous(), (q, k, v), ) if attn_precision == torch.float32: sim = einsum('b i d, b j d -> b i j', q.float(), k.float()) * scale else: sim = einsum('b i d, b j d -> b i j', q, k) * scale del q, k if exists(mask): if mask.dtype == torch.bool: mask = rearrange(mask, 'b ... 
-> b (...)') max_neg_value = -torch.finfo(sim.dtype).max mask = repeat(mask, 'b j -> (b h) () j', h=h) sim.masked_fill_(~mask, max_neg_value) else: if len(mask.shape) == 2: bs = 1 else: bs = mask.shape[0] mask = mask.reshape(bs, -1, mask.shape[-2], mask.shape[-1]).expand(b, heads, -1, -1).reshape(-1, mask.shape[-2], mask.shape[-1]) sim.add_(mask) sim = sim.softmax(dim=-1) out = einsum('b i j, b j d -> b i d', sim.to(v.dtype), v) out = ( out.unsqueeze(0) .reshape(b, heads, -1, dim_head) .permute(0, 2, 1, 3) .reshape(b, -1, heads * dim_head) ) return out def attention_sub_quad(query, key, value, heads, mask=None, attn_precision=None, skip_reshape=False): attn_precision = get_attn_precision(attn_precision) if skip_reshape: b, _, _, dim_head = query.shape else: b, _, dim_head = query.shape dim_head scale = dim_head ** -0.5 if skip_reshape: query = query.reshape(b * heads, -1, dim_head) value = value.reshape(b * heads, -1, dim_head) key = key.reshape(b * heads, -1, dim_head).movedim(1, 2) else: query = query.unsqueeze(3).reshape(b, -1, heads, dim_head).permute(0, 2, 1, 3).reshape(b * heads, -1, dim_head) value = value.unsqueeze(3).reshape(b, -1, heads, dim_head).permute(0, 2, 1, 3).reshape(b * heads, -1, dim_head) key = key.unsqueeze(3).reshape(b, -1, heads, dim_head).permute(0, 2, 3, 1).reshape(b * heads, dim_head, -1) dtype = query.dtype upcast_attention = attn_precision == torch.float32 and query.dtype != torch.float32 if upcast_attention: bytes_per_token = torch.finfo(torch.float32).bits else: bytes_per_token = torch.finfo(query.dtype).bits batch_x_heads, q_tokens, _ = query.shape _, _, k_tokens = key.shape qk_matmul_size_bytes = batch_x_heads * bytes_per_token * q_tokens * k_tokens mem_free_total, mem_free_torch = model_management.get_free_memory(query.device, True) kv_chunk_size_min = None kv_chunk_size = None query_chunk_size = None for x in [4096, 2048, 1024, 512, 256]: count = mem_free_total / (batch_x_heads * bytes_per_token * x * 4.0) if count >= k_tokens: kv_chunk_size = k_tokens query_chunk_size = x break if query_chunk_size is None: query_chunk_size = 512 if mask is not None: if len(mask.shape) == 2: bs = 1 else: bs = mask.shape[0] mask = mask.reshape(bs, -1, mask.shape[-2], mask.shape[-1]).expand(b, heads, -1, -1).reshape(-1, mask.shape[-2], mask.shape[-1]) hidden_states = efficient_dot_product_attention( query, key, value, query_chunk_size=query_chunk_size, kv_chunk_size=kv_chunk_size, kv_chunk_size_min=kv_chunk_size_min, use_checkpoint=False, upcast_attention=upcast_attention, mask=mask, ) hidden_states = hidden_states.to(dtype) hidden_states = hidden_states.unflatten(0, (-1, heads)).transpose(1,2).flatten(start_dim=2) return hidden_states def attention_split(q, k, v, heads, mask=None, attn_precision=None, skip_reshape=False): attn_precision = get_attn_precision(attn_precision) if skip_reshape: b, _, _, dim_head = q.shape else: b, _, dim_head = q.shape dim_head scale = dim_head ** -0.5 h = heads if skip_reshape: q, k, v = map( lambda t: t.reshape(b * heads, -1, dim_head), (q, k, v), ) else: q, k, v = map( lambda t: t.unsqueeze(3) .reshape(b, -1, heads, dim_head) .permute(0, 2, 1, 3) .reshape(b * heads, -1, dim_head) .contiguous(), (q, k, v), ) r1 = torch.zeros(q.shape[0], q.shape[1], v.shape[2], device=q.device, dtype=q.dtype) mem_free_total = model_management.get_free_memory(q.device) if attn_precision == torch.float32: element_size = 4 upcast = True else: element_size = q.element_size() upcast = False gb = 1024 ** 3 tensor_size = q.shape[0] * q.shape[1] * k.shape[1] * 
element_size modifier = 3 mem_required = tensor_size * modifier steps = 1 if mem_required > mem_free_total: steps = 2**(math.ceil(math.log(mem_required / mem_free_total, 2))) if steps > 64: max_res = math.floor(math.sqrt(math.sqrt(mem_free_total / 2.5)) / 8) * 64 raise RuntimeError(f'Not enough memory, use lower resolution (max approx. {max_res}x{max_res}). ' f'Need: {mem_required/64/gb:0.1f}GB free, Have:{mem_free_total/gb:0.1f}GB free') if mask is not None: if len(mask.shape) == 2: bs = 1 else: bs = mask.shape[0] mask = mask.reshape(bs, -1, mask.shape[-2], mask.shape[-1]).expand(b, heads, -1, -1).reshape(-1, mask.shape[-2], mask.shape[-1]) first_op_done = False cleared_cache = False while True: try: slice_size = q.shape[1] for i in range(0, q.shape[1], slice_size): end = i + slice_size if upcast: with torch.autocast(enabled=False, device_type = 'cuda'): s1 = einsum('b i d, b j d -> b i j', q[:, i:end].float(), k.float()) * scale else: s1 = einsum('b i d, b j d -> b i j', q[:, i:end], k) * scale if mask is not None: if len(mask.shape) == 2: s1 += mask[i:end] else: s1 += mask[:, i:end] s2 = s1.softmax(dim=-1).to(v.dtype) del s1 first_op_done = True r1[:, i:end] = einsum('b i j, b j d -> b i d', s2, v) del s2 break except model_management.OOM_EXCEPTION as e: if first_op_done == False: model_management.soft_empty_cache(True) if cleared_cache == False: cleared_cache = True logging.warning("out of memory error, emptying cache and trying again") continue steps *= 2 if steps > 64: raise e logging.warning("out of memory error, increasing steps and trying again {}".format(steps)) else: raise e del q, k, v r1 = ( r1.unsqueeze(0) .reshape(b, heads, -1, dim_head) .permute(0, 2, 1, 3) .reshape(b, -1, heads * dim_head) ) return r1 BROKEN_XFORMERS = False try: x_vers = xformers.__version__ BROKEN_XFORMERS = x_vers.startswith("0.0.2") and not x_vers.startswith("0.0.20") except: pass def attention_xformers(q, k, v, heads, mask=None, attn_precision=None, skip_reshape=False): if skip_reshape: b, _, _, dim_head = q.shape else: b, _, dim_head = q.shape dim_head disabled_xformers = False if BROKEN_XFORMERS: if b * heads > 65535: disabled_xformers = True if not disabled_xformers: if torch.jit.is_tracing() or torch.jit.is_scripting(): disabled_xformers = True if disabled_xformers: return attention_pytorch(q, k, v, heads, mask) if skip_reshape: q, k, v = map( lambda t: t.reshape(b * heads, -1, dim_head), (q, k, v), ) else: q, k, v = map( lambda t: t.reshape(b, -1, heads, dim_head), (q, k, v), ) if mask is not None: pad = 8 - q.shape[1] % 8 mask_out = torch.empty([q.shape[0], q.shape[1], q.shape[1] + pad], dtype=q.dtype, device=q.device) mask_out[:, :, :mask.shape[-1]] = mask mask = mask_out[:, :, :mask.shape[-1]] out = xformers.ops.memory_efficient_attention(q, k, v, attn_bias=mask) if skip_reshape: out = ( out.unsqueeze(0) .reshape(b, heads, -1, dim_head) .permute(0, 2, 1, 3) .reshape(b, -1, heads * dim_head) ) else: out = ( out.reshape(b, -1, heads * dim_head) ) return out def attention_pytorch(q, k, v, heads, mask=None, attn_precision=None, skip_reshape=False): if skip_reshape: b, _, _, dim_head = q.shape else: b, _, dim_head = q.shape dim_head q, k, v = map( lambda t: t.view(b, -1, heads, dim_head).transpose(1, 2), (q, k, v), ) out = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=mask, dropout_p=0.0, is_causal=False) out = ( out.transpose(1, 2).reshape(b, -1, heads * dim_head) ) return out optimized_attention = attention_basic if model_management.xformers_enabled(): logging.info("Using 
xformers cross attention") optimized_attention = attention_xformers elif model_management.pytorch_attention_enabled(): logging.info("Using pytorch cross attention") optimized_attention = attention_pytorch else: if args.use_split_cross_attention: logging.info("Using split optimization for cross attention") optimized_attention = attention_split else: logging.info("Using sub quadratic optimization for cross attention, if you have memory or speed issues try using: --use-split-cross-attention") optimized_attention = attention_sub_quad optimized_attention_masked = optimized_attention def optimized_attention_for_device(device, mask=False, small_input=False): if small_input: if model_management.pytorch_attention_enabled(): return attention_pytorch else: return attention_basic if device == torch.device("cpu"): return attention_sub_quad if mask: return optimized_attention_masked return optimized_attention class CrossAttention(nn.Module): def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0., attn_precision=None, dtype=None, device=None, operations=ops): super().__init__() inner_dim = dim_head * heads context_dim = default(context_dim, query_dim) self.attn_precision = attn_precision self.heads = heads self.dim_head = dim_head self.to_q = operations.Linear(query_dim, inner_dim, bias=False, dtype=dtype, device=device) self.to_k = operations.Linear(context_dim, inner_dim, bias=False, dtype=dtype, device=device) self.to_v = operations.Linear(context_dim, inner_dim, bias=False, dtype=dtype, device=device) self.to_out = nn.Sequential(operations.Linear(inner_dim, query_dim, dtype=dtype, device=device), nn.Dropout(dropout)) def forward(self, x, context=None, value=None, mask=None): q = self.to_q(x) context = default(context, x) k = self.to_k(context) if value is not None: v = self.to_v(value) del value else: v = self.to_v(context) if mask is None: out = optimized_attention(q, k, v, self.heads, attn_precision=self.attn_precision) else: out = optimized_attention_masked(q, k, v, self.heads, mask, attn_precision=self.attn_precision) return self.to_out(out) class BasicTransformerBlock(nn.Module): def __init__(self, dim, n_heads, d_head, dropout=0., context_dim=None, gated_ff=True, checkpoint=True, ff_in=False, inner_dim=None, disable_self_attn=False, disable_temporal_crossattention=False, switch_temporal_ca_to_sa=False, attn_precision=None, dtype=None, device=None, operations=ops): super().__init__() self.ff_in = ff_in or inner_dim is not None if inner_dim is None: inner_dim = dim self.is_res = inner_dim == dim self.attn_precision = attn_precision if self.ff_in: self.norm_in = operations.LayerNorm(dim, dtype=dtype, device=device) self.ff_in = FeedForward(dim, dim_out=inner_dim, dropout=dropout, glu=gated_ff, dtype=dtype, device=device, operations=operations) self.disable_self_attn = disable_self_attn self.attn1 = CrossAttention(query_dim=inner_dim, heads=n_heads, dim_head=d_head, dropout=dropout, context_dim=context_dim if self.disable_self_attn else None, attn_precision=self.attn_precision, dtype=dtype, device=device, operations=operations) self.ff = FeedForward(inner_dim, dim_out=dim, dropout=dropout, glu=gated_ff, dtype=dtype, device=device, operations=operations) if disable_temporal_crossattention: if switch_temporal_ca_to_sa: raise ValueError else: self.attn2 = None else: context_dim_attn2 = None if not switch_temporal_ca_to_sa: context_dim_attn2 = context_dim self.attn2 = CrossAttention(query_dim=inner_dim, context_dim=context_dim_attn2, heads=n_heads, dim_head=d_head, 
dropout=dropout, attn_precision=self.attn_precision, dtype=dtype, device=device, operations=operations) self.norm2 = operations.LayerNorm(inner_dim, dtype=dtype, device=device) self.norm1 = operations.LayerNorm(inner_dim, dtype=dtype, device=device) self.norm3 = operations.LayerNorm(inner_dim, dtype=dtype, device=device) self.n_heads = n_heads self.d_head = d_head self.switch_temporal_ca_to_sa = switch_temporal_ca_to_sa def forward(self, x, context=None, transformer_options={}): extra_options = {} block = transformer_options.get("block", None) block_index = transformer_options.get("block_index", 0) transformer_patches = {} transformer_patches_replace = {} for k in transformer_options: if k == "patches": transformer_patches = transformer_options[k] elif k == "patches_replace": transformer_patches_replace = transformer_options[k] else: extra_options[k] = transformer_options[k] extra_options["n_heads"] = self.n_heads extra_options["dim_head"] = self.d_head extra_options["attn_precision"] = self.attn_precision if self.ff_in: x_skip = x x = self.ff_in(self.norm_in(x)) if self.is_res: x += x_skip n = self.norm1(x) if self.disable_self_attn: context_attn1 = context else: context_attn1 = None value_attn1 = None if "attn1_patch" in transformer_patches: patch = transformer_patches["attn1_patch"] if context_attn1 is None: context_attn1 = n value_attn1 = context_attn1 for p in patch: n, context_attn1, value_attn1 = p(n, context_attn1, value_attn1, extra_options) if block is not None: transformer_block = (block[0], block[1], block_index) else: transformer_block = None attn1_replace_patch = transformer_patches_replace.get("attn1", {}) block_attn1 = transformer_block if block_attn1 not in attn1_replace_patch: block_attn1 = block if block_attn1 in attn1_replace_patch: if context_attn1 is None: context_attn1 = n value_attn1 = n n = self.attn1.to_q(n) context_attn1 = self.attn1.to_k(context_attn1) value_attn1 = self.attn1.to_v(value_attn1) n = attn1_replace_patch[block_attn1](n, context_attn1, value_attn1, extra_options) n = self.attn1.to_out(n) else: n = self.attn1(n, context=context_attn1, value=value_attn1) if "attn1_output_patch" in transformer_patches: patch = transformer_patches["attn1_output_patch"] for p in patch: n = p(n, extra_options) x += n if "middle_patch" in transformer_patches: patch = transformer_patches["middle_patch"] for p in patch: x = p(x, extra_options) if self.attn2 is not None: n = self.norm2(x) if self.switch_temporal_ca_to_sa: context_attn2 = n else: context_attn2 = context value_attn2 = None if "attn2_patch" in transformer_patches: patch = transformer_patches["attn2_patch"] value_attn2 = context_attn2 for p in patch: n, context_attn2, value_attn2 = p(n, context_attn2, value_attn2, extra_options) attn2_replace_patch = transformer_patches_replace.get("attn2", {}) block_attn2 = transformer_block if block_attn2 not in attn2_replace_patch: block_attn2 = block if block_attn2 in attn2_replace_patch: if value_attn2 is None: value_attn2 = context_attn2 n = self.attn2.to_q(n) context_attn2 = self.attn2.to_k(context_attn2) value_attn2 = self.attn2.to_v(value_attn2) n = attn2_replace_patch[block_attn2](n, context_attn2, value_attn2, extra_options) n = self.attn2.to_out(n) else: n = self.attn2(n, context=context_attn2, value=value_attn2) if "attn2_output_patch" in transformer_patches: patch = transformer_patches["attn2_output_patch"] for p in patch: n = p(n, extra_options) x += n if self.is_res: x_skip = x x = self.ff(self.norm3(x)) if self.is_res: x += x_skip return x class 
SpatialTransformer(nn.Module): """ Transformer block for image-like data. First, project the input (aka embedding) and reshape to b, t, d. Then apply standard transformer action. Finally, reshape to image NEW: use_linear for more efficiency instead of the 1x1 convs """ def __init__(self, in_channels, n_heads, d_head, depth=1, dropout=0., context_dim=None, disable_self_attn=False, use_linear=False, use_checkpoint=True, attn_precision=None, dtype=None, device=None, operations=ops): super().__init__() if exists(context_dim) and not isinstance(context_dim, list): context_dim = [context_dim] * depth self.in_channels = in_channels inner_dim = n_heads * d_head self.norm = operations.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True, dtype=dtype, device=device) if not use_linear: self.proj_in = operations.Conv2d(in_channels, inner_dim, kernel_size=1, stride=1, padding=0, dtype=dtype, device=device) else: self.proj_in = operations.Linear(in_channels, inner_dim, dtype=dtype, device=device) self.transformer_blocks = nn.ModuleList( [BasicTransformerBlock(inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim[d], disable_self_attn=disable_self_attn, checkpoint=use_checkpoint, attn_precision=attn_precision, dtype=dtype, device=device, operations=operations) for d in range(depth)] ) if not use_linear: self.proj_out = operations.Conv2d(inner_dim,in_channels, kernel_size=1, stride=1, padding=0, dtype=dtype, device=device) else: self.proj_out = operations.Linear(in_channels, inner_dim, dtype=dtype, device=device) self.use_linear = use_linear def forward(self, x, context=None, transformer_options={}): if not isinstance(context, list): context = [context] * len(self.transformer_blocks) b, c, h, w = x.shape x_in = x x = self.norm(x) if not self.use_linear: x = self.proj_in(x) x = x.movedim(1, 3).flatten(1, 2).contiguous() if self.use_linear: x = self.proj_in(x) for i, block in enumerate(self.transformer_blocks): transformer_options["block_index"] = i x = block(x, context=context[i], transformer_options=transformer_options) if self.use_linear: x = self.proj_out(x) x = x.reshape(x.shape[0], h, w, x.shape[-1]).movedim(3, 1).contiguous() if not self.use_linear: x = self.proj_out(x) return x + x_in class SpatialVideoTransformer(SpatialTransformer): def __init__( self, in_channels, n_heads, d_head, depth=1, dropout=0.0, use_linear=False, context_dim=None, use_spatial_context=False, timesteps=None, merge_strategy: str = "fixed", merge_factor: float = 0.5, time_context_dim=None, ff_in=False, checkpoint=False, time_depth=1, disable_self_attn=False, disable_temporal_crossattention=False, max_time_embed_period: int = 10000, attn_precision=None, dtype=None, device=None, operations=ops ): super().__init__( in_channels, n_heads, d_head, depth=depth, dropout=dropout, use_checkpoint=checkpoint, context_dim=context_dim, use_linear=use_linear, disable_self_attn=disable_self_attn, attn_precision=attn_precision, dtype=dtype, device=device, operations=operations ) self.time_depth = time_depth self.depth = depth self.max_time_embed_period = max_time_embed_period time_mix_d_head = d_head n_time_mix_heads = n_heads time_mix_inner_dim = int(time_mix_d_head * n_time_mix_heads) inner_dim = n_heads * d_head if use_spatial_context: time_context_dim = context_dim self.time_stack = nn.ModuleList( [ BasicTransformerBlock( inner_dim, n_time_mix_heads, time_mix_d_head, dropout=dropout, context_dim=time_context_dim, checkpoint=checkpoint, ff_in=ff_in, inner_dim=time_mix_inner_dim, 
disable_self_attn=disable_self_attn, disable_temporal_crossattention=disable_temporal_crossattention, attn_precision=attn_precision, dtype=dtype, device=device, operations=operations ) for _ in range(self.depth) ] ) assert len(self.time_stack) == len(self.transformer_blocks) self.use_spatial_context = use_spatial_context self.in_channels = in_channels time_embed_dim = self.in_channels * 4 self.time_pos_embed = nn.Sequential( operations.Linear(self.in_channels, time_embed_dim, dtype=dtype, device=device), nn.SiLU(), operations.Linear(time_embed_dim, self.in_channels, dtype=dtype, device=device), ) self.time_mixer = AlphaBlender( alpha=merge_factor, merge_strategy=merge_strategy ) def forward( self, x: torch.Tensor, context: Optional[torch.Tensor] = None, time_context: Optional[torch.Tensor] = None, timesteps: Optional[int] = None, image_only_indicator: Optional[torch.Tensor] = None, transformer_options={} ) -> torch.Tensor: _, _, h, w = x.shape x_in = x spatial_context = None if exists(context): spatial_context = context if self.use_spatial_context: assert ( context.ndim == 3 ), f"n dims of spatial context should be 3 but are {context.ndim}" if time_context is None: time_context = context time_context_first_timestep = time_context[::timesteps] time_context = repeat( time_context_first_timestep, "b ... -> (b n) ...", n=h * w ) elif time_context is not None and not self.use_spatial_context: time_context = repeat(time_context, "b ... -> (b n) ...", n=h * w) if time_context.ndim == 2: time_context = rearrange(time_context, "b c -> b 1 c") x = self.norm(x) if not self.use_linear: x = self.proj_in(x) x = rearrange(x, "b c h w -> b (h w) c") if self.use_linear: x = self.proj_in(x) num_frames = torch.arange(timesteps, device=x.device) num_frames = repeat(num_frames, "t -> b t", b=x.shape[0] num_frames = rearrange(num_frames, "b t -> (b t)") t_emb = timestep_embedding(num_frames, self.in_channels, repeat_only=False, max_period=self.max_time_embed_period).to(x.dtype) emb = self.time_pos_embed(t_emb) emb = emb[:, None, :] for it_, (block, mix_block) in enumerate( zip(self.transformer_blocks, self.time_stack) ): transformer_options["block_index"] = it_ x = block( x, context=spatial_context, transformer_options=transformer_options, ) x_mix = x x_mix = x_mix + emb B, S, C = x_mix.shape x_mix = rearrange(x_mix, "(b t) s c -> (b s) t c", t=timesteps) x_mix = mix_block(x_mix, context=time_context) x_mix = rearrange( x_mix, "(b s) t c -> (b t) s c", s=S, b=B ) x = self.time_mixer(x_spatial=x, x_temporal=x_mix, image_only_indicator=image_only_indicator) if self.use_linear: x = self.proj_out(x) x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w) if not self.use_linear: x = self.proj_out(x) out = x + x_in return out
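# --- Shape sketch (not part of the original file) of the q/k/v layout the attention_* helpers
# earlier in this file work with: queries, keys, and values arrive as (batch, tokens, heads * dim_head),
# are split into heads, attended, and merged back. This mirrors the reshape pattern used by
# attention_pytorch, implemented here directly on top of scaled_dot_product_attention.
import torch
import torch.nn.functional as F

b, tokens, heads, dim_head = 2, 77, 8, 64
q = torch.randn(b, tokens, heads * dim_head)
k = torch.randn(b, tokens, heads * dim_head)
v = torch.randn(b, tokens, heads * dim_head)

def sdp(q, k, v):
    # (b, t, h*d) -> (b, h, t, d), attend, then merge heads back into the channel dim
    q, k, v = (t.view(b, -1, heads, dim_head).transpose(1, 2) for t in (q, k, v))
    out = F.scaled_dot_product_attention(q, k, v)
    return out.transpose(1, 2).reshape(b, -1, heads * dim_head)

print(sdp(q, k, v).shape)    # torch.Size([2, 77, 512])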
import torch from torch import nn class LitEma(nn.Module): def __init__(self, model, decay=0.9999, use_num_upates=True): super().__init__() if decay < 0.0 or decay > 1.0: raise ValueError('Decay must be between 0 and 1') self.m_name2s_name = {} self.register_buffer('decay', torch.tensor(decay, dtype=torch.float32)) self.register_buffer('num_updates', torch.tensor(0, dtype=torch.int) if use_num_upates else torch.tensor(-1, dtype=torch.int)) for name, p in model.named_parameters(): if p.requires_grad: s_name = name.replace('.', '') self.m_name2s_name.update({name: s_name}) self.register_buffer(s_name, p.clone().detach().data) self.collected_params = [] def reset_num_updates(self): del self.num_updates self.register_buffer('num_updates', torch.tensor(0, dtype=torch.int)) def forward(self, model): decay = self.decay if self.num_updates >= 0: self.num_updates += 1 decay = min(self.decay, (1 + self.num_updates) / (10 + self.num_updates)) one_minus_decay = 1.0 - decay with torch.no_grad(): m_param = dict(model.named_parameters()) shadow_params = dict(self.named_buffers()) for key in m_param: if m_param[key].requires_grad: sname = self.m_name2s_name[key] shadow_params[sname] = shadow_params[sname].type_as(m_param[key]) shadow_params[sname].sub_(one_minus_decay * (shadow_params[sname] - m_param[key])) else: assert not key in self.m_name2s_name def copy_to(self, model): m_param = dict(model.named_parameters()) shadow_params = dict(self.named_buffers()) for key in m_param: if m_param[key].requires_grad: m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data) else: assert not key in self.m_name2s_name def store(self, parameters): """ Save the current parameters for restoring later. Args: parameters: Iterable of `torch.nn.Parameter`; the parameters to be temporarily stored. """ self.collected_params = [param.clone() for param in parameters] def restore(self, parameters): """ Restore the parameters stored with the `store` method. Useful to validate the model with EMA parameters without affecting the original optimization process. Store the parameters before the `copy_to` method. After validation (or model saving), use this to restore the former parameters. Args: parameters: Iterable of `torch.nn.Parameter`; the parameters to be updated with the stored parameters. """ for c_param, param in zip(self.collected_params, parameters): param.data.copy_(c_param.data)
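# --- Hedged usage sketch (not part of the original file) for LitEma: track a model's parameters,
# update the shadow copy after each optimizer step, then temporarily swap the EMA weights in for
# evaluation and restore the live training weights afterwards.
import torch

model = torch.nn.Linear(4, 4)
ema = LitEma(model, decay=0.999)

# ... after each optimizer step:
ema(model)                        # update shadow parameters from the live model

# evaluate with EMA weights, then put the training weights back
ema.store(model.parameters())
ema.copy_to(model)
# ... run validation here ...
ema.restore(model.parameters())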
from functools import partial import torch from torch import Tensor from torch.utils.checkpoint import checkpoint import math import logging try: from typing import Optional, NamedTuple, List, Protocol except ImportError: from typing import Optional, NamedTuple, List from typing_extensions import Protocol from torch import Tensor from typing import List from comfy import model_management def dynamic_slice( x: Tensor, starts: List[int], sizes: List[int], ) -> Tensor: slicing = [slice(start, start + size) for start, size in zip(starts, sizes)] return x[slicing] class AttnChunk(NamedTuple): exp_values: Tensor exp_weights_sum: Tensor max_score: Tensor class SummarizeChunk(Protocol): @staticmethod def __call__( query: Tensor, key_t: Tensor, value: Tensor, ) -> AttnChunk: ... class ComputeQueryChunkAttn(Protocol): @staticmethod def __call__( query: Tensor, key_t: Tensor, value: Tensor, ) -> Tensor: ... def _summarize_chunk( query: Tensor, key_t: Tensor, value: Tensor, scale: float, upcast_attention: bool, mask, ) -> AttnChunk: if upcast_attention: with torch.autocast(enabled=False, device_type = 'cuda'): query = query.float() key_t = key_t.float() attn_weights = torch.baddbmm( torch.empty(1, 1, 1, device=query.device, dtype=query.dtype), query, key_t, alpha=scale, beta=0, ) else: attn_weights = torch.baddbmm( torch.empty(1, 1, 1, device=query.device, dtype=query.dtype), query, key_t, alpha=scale, beta=0, ) max_score, _ = torch.max(attn_weights, -1, keepdim=True) max_score = max_score.detach() attn_weights -= max_score if mask is not None: attn_weights += mask torch.exp(attn_weights, out=attn_weights) exp_weights = attn_weights.to(value.dtype) exp_values = torch.bmm(exp_weights, value) max_score = max_score.squeeze(-1) return AttnChunk(exp_values, exp_weights.sum(dim=-1), max_score) def _query_chunk_attention( query: Tensor, key_t: Tensor, value: Tensor, summarize_chunk: SummarizeChunk, kv_chunk_size: int, mask, ) -> Tensor: batch_x_heads, k_channels_per_head, k_tokens = key_t.shape _, _, v_channels_per_head = value.shape def chunk_scanner(chunk_idx: int, mask) -> AttnChunk: key_chunk = dynamic_slice( key_t, (0, 0, chunk_idx), (batch_x_heads, k_channels_per_head, kv_chunk_size) ) value_chunk = dynamic_slice( value, (0, chunk_idx, 0), (batch_x_heads, kv_chunk_size, v_channels_per_head) ) if mask is not None: mask = mask[:,:,chunk_idx:chunk_idx + kv_chunk_size] return summarize_chunk(query, key_chunk, value_chunk, mask=mask) chunks: List[AttnChunk] = [ chunk_scanner(chunk, mask) for chunk in torch.arange(0, k_tokens, kv_chunk_size) ] acc_chunk = AttnChunk(*map(torch.stack, zip(*chunks))) chunk_values, chunk_weights, chunk_max = acc_chunk global_max, _ = torch.max(chunk_max, 0, keepdim=True) max_diffs = torch.exp(chunk_max - global_max) chunk_values *= torch.unsqueeze(max_diffs, -1) chunk_weights *= max_diffs all_values = chunk_values.sum(dim=0) all_weights = torch.unsqueeze(chunk_weights, -1).sum(dim=0) return all_values / all_weights def _get_attention_scores_no_kv_chunking( query: Tensor, key_t: Tensor, value: Tensor, scale: float, upcast_attention: bool, mask, ) -> Tensor: if upcast_attention: with torch.autocast(enabled=False, device_type = 'cuda'): query = query.float() key_t = key_t.float() attn_scores = torch.baddbmm( torch.empty(1, 1, 1, device=query.device, dtype=query.dtype), query, key_t, alpha=scale, beta=0, ) else: attn_scores = torch.baddbmm( torch.empty(1, 1, 1, device=query.device, dtype=query.dtype), query, key_t, alpha=scale, beta=0, ) if mask is not None: attn_scores += mask try: 
attn_probs = attn_scores.softmax(dim=-1) del attn_scores except model_management.OOM_EXCEPTION: logging.warning("ran out of memory while running softmax in _get_attention_scores_no_kv_chunking, trying slower in place softmax instead") attn_scores -= attn_scores.max(dim=-1, keepdim=True).values torch.exp(attn_scores, out=attn_scores) summed = torch.sum(attn_scores, dim=-1, keepdim=True) attn_scores /= summed attn_probs = attn_scores hidden_states_slice = torch.bmm(attn_probs.to(value.dtype), value) return hidden_states_slice class ScannedChunk(NamedTuple): chunk_idx: int attn_chunk: AttnChunk def efficient_dot_product_attention( query: Tensor, key_t: Tensor, value: Tensor, query_chunk_size=1024, kv_chunk_size: Optional[int] = None, kv_chunk_size_min: Optional[int] = None, use_checkpoint=True, upcast_attention=False, mask = None, ): """Computes efficient dot-product attention given query, transposed key, and value. This is efficient version of attention presented in https: Args: query: queries for calculating attention with shape of `[batch * num_heads, tokens, channels_per_head]`. key_t: keys for calculating attention with shape of `[batch * num_heads, channels_per_head, tokens]`. value: values to be used in attention with shape of `[batch * num_heads, tokens, channels_per_head]`. query_chunk_size: int: query chunks size kv_chunk_size: Optional[int]: key/value chunks size. if None: defaults to sqrt(key_tokens) kv_chunk_size_min: Optional[int]: key/value minimum chunk size. only considered when kv_chunk_size is None. changes `sqrt(key_tokens)` into `max(sqrt(key_tokens), kv_chunk_size_min)`, to ensure our chunk sizes don't get too small (smaller chunks = more chunks = less concurrent work done). use_checkpoint: bool: whether to use checkpointing (recommended True for training, False for inference) Returns: Output of shape `[batch * num_heads, query_tokens, channels_per_head]`. """ batch_x_heads, q_tokens, q_channels_per_head = query.shape _, _, k_tokens = key_t.shape scale = q_channels_per_head ** -0.5 kv_chunk_size = min(kv_chunk_size or int(math.sqrt(k_tokens)), k_tokens) if kv_chunk_size_min is not None: kv_chunk_size = max(kv_chunk_size, kv_chunk_size_min) if mask is not None and len(mask.shape) == 2: mask = mask.unsqueeze(0) def get_query_chunk(chunk_idx: int) -> Tensor: return dynamic_slice( query, (0, chunk_idx, 0), (batch_x_heads, min(query_chunk_size, q_tokens), q_channels_per_head) ) def get_mask_chunk(chunk_idx: int) -> Tensor: if mask is None: return None chunk = min(query_chunk_size, q_tokens) return mask[:,chunk_idx:chunk_idx + chunk] summarize_chunk: SummarizeChunk = partial(_summarize_chunk, scale=scale, upcast_attention=upcast_attention) summarize_chunk: SummarizeChunk = partial(checkpoint, summarize_chunk) if use_checkpoint else summarize_chunk compute_query_chunk_attn: ComputeQueryChunkAttn = partial( _get_attention_scores_no_kv_chunking, scale=scale, upcast_attention=upcast_attention ) if k_tokens <= kv_chunk_size else ( partial( _query_chunk_attention, kv_chunk_size=kv_chunk_size, summarize_chunk=summarize_chunk, ) ) if q_tokens <= query_chunk_size: return compute_query_chunk_attn( query=query, key_t=key_t, value=value, mask=mask, ) res = torch.cat([ compute_query_chunk_attn( query=get_query_chunk(i * query_chunk_size), key_t=key_t, value=value, mask=get_mask_chunk(i * query_chunk_size) ) for i in range(math.ceil(q_tokens / query_chunk_size)) ], dim=1) return res
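# --- Hedged usage sketch (not part of the original file) for efficient_dot_product_attention:
# note that the key is passed already transposed to (batch * heads, channels_per_head, tokens),
# and the chunk sizes bound peak memory by processing queries and keys/values in slices.
import torch

bh, q_tokens, k_tokens, dim = 4, 1024, 1024, 64
query = torch.randn(bh, q_tokens, dim)
key_t = torch.randn(bh, dim, k_tokens)          # transposed key
value = torch.randn(bh, k_tokens, dim)

out = efficient_dot_product_attention(
    query, key_t, value,
    query_chunk_size=256,       # queries processed 256 tokens at a time
    kv_chunk_size=None,         # defaults to roughly sqrt(k_tokens) per chunk
    use_checkpoint=False,       # checkpointing only pays off during training
)
print(out.shape)                # torch.Size([4, 1024, 64])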
import functools from typing import Callable, Iterable, Union import torch from einops import rearrange, repeat import comfy.ops ops = comfy.ops.disable_weight_init from .diffusionmodules.model import ( AttnBlock, Decoder, ResnetBlock, ) from .diffusionmodules.openaimodel import ResBlock, timestep_embedding from .attention import BasicTransformerBlock def partialclass(cls, *args, **kwargs): class NewCls(cls): __init__ = functools.partialmethod(cls.__init__, *args, **kwargs) return NewCls class VideoResBlock(ResnetBlock): def __init__( self, out_channels, *args, dropout=0.0, video_kernel_size=3, alpha=0.0, merge_strategy="learned", **kwargs, ): super().__init__(out_channels=out_channels, dropout=dropout, *args, **kwargs) if video_kernel_size is None: video_kernel_size = [3, 1, 1] self.time_stack = ResBlock( channels=out_channels, emb_channels=0, dropout=dropout, dims=3, use_scale_shift_norm=False, use_conv=False, up=False, down=False, kernel_size=video_kernel_size, use_checkpoint=False, skip_t_emb=True, ) self.merge_strategy = merge_strategy if self.merge_strategy == "fixed": self.register_buffer("mix_factor", torch.Tensor([alpha])) elif self.merge_strategy == "learned": self.register_parameter( "mix_factor", torch.nn.Parameter(torch.Tensor([alpha])) ) else: raise ValueError(f"unknown merge strategy {self.merge_strategy}") def get_alpha(self, bs): if self.merge_strategy == "fixed": return self.mix_factor elif self.merge_strategy == "learned": return torch.sigmoid(self.mix_factor) else: raise NotImplementedError() def forward(self, x, temb, skip_video=False, timesteps=None): b, c, h, w = x.shape if timesteps is None: timesteps = b x = super().forward(x, temb) if not skip_video: x_mix = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) x = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) x = self.time_stack(x, temb) alpha = self.get_alpha(bs=b // timesteps) x = alpha * x + (1.0 - alpha) * x_mix x = rearrange(x, "b c t h w -> (b t) c h w") return x class AE3DConv(ops.Conv2d): def __init__(self, in_channels, out_channels, video_kernel_size=3, *args, **kwargs): super().__init__(in_channels, out_channels, *args, **kwargs) if isinstance(video_kernel_size, Iterable): padding = [int(k // 2) for k in video_kernel_size] else: padding = int(video_kernel_size // 2) self.time_mix_conv = ops.Conv3d( in_channels=out_channels, out_channels=out_channels, kernel_size=video_kernel_size, padding=padding, ) def forward(self, input, timesteps=None, skip_video=False): if timesteps is None: timesteps = input.shape[0] x = super().forward(input) if skip_video: return x x = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) x = self.time_mix_conv(x) return rearrange(x, "b c t h w -> (b t) c h w") class AttnVideoBlock(AttnBlock): def __init__( self, in_channels: int, alpha: float = 0, merge_strategy: str = "learned" ): super().__init__(in_channels) self.time_mix_block = BasicTransformerBlock( dim=in_channels, n_heads=1, d_head=in_channels, checkpoint=False, ff_in=True, ) time_embed_dim = self.in_channels * 4 self.video_time_embed = torch.nn.Sequential( ops.Linear(self.in_channels, time_embed_dim), torch.nn.SiLU(), ops.Linear(time_embed_dim, self.in_channels), ) self.merge_strategy = merge_strategy if self.merge_strategy == "fixed": self.register_buffer("mix_factor", torch.Tensor([alpha])) elif self.merge_strategy == "learned": self.register_parameter( "mix_factor", torch.nn.Parameter(torch.Tensor([alpha])) ) else: raise ValueError(f"unknown merge strategy {self.merge_strategy}") def forward(self, x, timesteps=None, skip_time_block=False): if skip_time_block:
return super().forward(x) if timesteps is None: timesteps = x.shape[0] x_in = x x = self.attention(x) h, w = x.shape[2:] x = rearrange(x, "b c h w -> b (h w) c") x_mix = x num_frames = torch.arange(timesteps, device=x.device) num_frames = repeat(num_frames, "t -> b t", b=x.shape[0] num_frames = rearrange(num_frames, "b t -> (b t)") t_emb = timestep_embedding(num_frames, self.in_channels, repeat_only=False) emb = self.video_time_embed(t_emb) emb = emb[:, None, :] x_mix = x_mix + emb alpha = self.get_alpha().to(x.device) x_mix = self.time_mix_block(x_mix, timesteps=timesteps) x = alpha * x + (1.0 - alpha) * x_mix x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w) x = self.proj_out(x) return x_in + x def get_alpha( self, ): if self.merge_strategy == "fixed": return self.mix_factor elif self.merge_strategy == "learned": return torch.sigmoid(self.mix_factor) else: raise NotImplementedError(f"unknown merge strategy {self.merge_strategy}") def make_time_attn( in_channels, attn_type="vanilla", attn_kwargs=None, alpha: float = 0, merge_strategy: str = "learned", ): return partialclass( AttnVideoBlock, in_channels, alpha=alpha, merge_strategy=merge_strategy ) class Conv2DWrapper(torch.nn.Conv2d): def forward(self, input: torch.Tensor, **kwargs) -> torch.Tensor: return super().forward(input) class VideoDecoder(Decoder): available_time_modes = ["all", "conv-only", "attn-only"] def __init__( self, *args, video_kernel_size: Union[int, list] = 3, alpha: float = 0.0, merge_strategy: str = "learned", time_mode: str = "conv-only", **kwargs, ): self.video_kernel_size = video_kernel_size self.alpha = alpha self.merge_strategy = merge_strategy self.time_mode = time_mode assert ( self.time_mode in self.available_time_modes ), f"time_mode parameter has to be in {self.available_time_modes}" if self.time_mode != "attn-only": kwargs["conv_out_op"] = partialclass(AE3DConv, video_kernel_size=self.video_kernel_size) if self.time_mode not in ["conv-only", "only-last-conv"]: kwargs["attn_op"] = partialclass(make_time_attn, alpha=self.alpha, merge_strategy=self.merge_strategy) if self.time_mode not in ["attn-only", "only-last-conv"]: kwargs["resnet_op"] = partialclass(VideoResBlock, video_kernel_size=self.video_kernel_size, alpha=self.alpha, merge_strategy=self.merge_strategy) super().__init__(*args, **kwargs) def get_last_layer(self, skip_time_mix=False, **kwargs): if self.time_mode == "attn-only": raise NotImplementedError("TODO") else: return ( self.conv_out.time_mix_conv.weight if not skip_time_mix else self.conv_out.weight )
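# Illustrative sketch, not from the original file: the temporal blocks above
# (VideoResBlock / AttnVideoBlock) blend the per-frame spatial branch with the
# time-mixed branch through a single scalar gate. With merge_strategy="learned"
# the gate is sigmoid(mix_factor); with "fixed" it is the raw buffer value.
# Standalone illustration of that blend with made-up tensor sizes:
import torch

def _demo_temporal_blend():
    x_spatial = torch.randn(8, 4, 16, 16)   # (b*t, c, h, w) output of the 2D path
    x_temporal = torch.randn(8, 4, 16, 16)  # same activations after the temporal stack
    mix_factor = torch.tensor([0.0])        # "learned" parameter as initialized above (alpha=0)
    alpha = torch.sigmoid(mix_factor)       # 0.5 at init, i.e. an even mix of both branches
    blended = alpha * x_temporal + (1.0 - alpha) * x_spatial
    assert blended.shape == x_spatial.shape
    return blended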
import logging import math from typing import Dict, Optional import numpy as np import torch import torch.nn as nn from .. import attention from einops import rearrange, repeat def default(x, y): if x is not None: return x return y class Mlp(nn.Module): """ MLP as used in Vision Transformer, MLP-Mixer and related networks """ def __init__( self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, norm_layer=None, bias=True, drop=0., use_conv=False, dtype=None, device=None, operations=None, ): super().__init__() out_features = out_features or in_features hidden_features = hidden_features or in_features drop_probs = drop linear_layer = partial(operations.Conv2d, kernel_size=1) if use_conv else operations.Linear self.fc1 = linear_layer(in_features, hidden_features, bias=bias, dtype=dtype, device=device) self.act = act_layer() self.drop1 = nn.Dropout(drop_probs) self.norm = norm_layer(hidden_features) if norm_layer is not None else nn.Identity() self.fc2 = linear_layer(hidden_features, out_features, bias=bias, dtype=dtype, device=device) self.drop2 = nn.Dropout(drop_probs) def forward(self, x): x = self.fc1(x) x = self.act(x) x = self.drop1(x) x = self.norm(x) x = self.fc2(x) x = self.drop2(x) return x class PatchEmbed(nn.Module): """ 2D Image to Patch Embedding """ dynamic_img_pad: torch.jit.Final[bool] def __init__( self, img_size: Optional[int] = 224, patch_size: int = 16, in_chans: int = 3, embed_dim: int = 768, norm_layer = None, flatten: bool = True, bias: bool = True, strict_img_size: bool = True, dynamic_img_pad: bool = True, dtype=None, device=None, operations=None, ): super().__init__() self.patch_size = (patch_size, patch_size) if img_size is not None: self.img_size = (img_size, img_size) self.grid_size = tuple([s self.num_patches = self.grid_size[0] * self.grid_size[1] else: self.img_size = None self.grid_size = None self.num_patches = None self.flatten = flatten self.strict_img_size = strict_img_size self.dynamic_img_pad = dynamic_img_pad self.proj = operations.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size, bias=bias, dtype=dtype, device=device) self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() def forward(self, x): B, C, H, W = x.shape if self.dynamic_img_pad: pad_h = (self.patch_size[0] - H % self.patch_size[0]) % self.patch_size[0] pad_w = (self.patch_size[1] - W % self.patch_size[1]) % self.patch_size[1] x = torch.nn.functional.pad(x, (0, pad_w, 0, pad_h), mode='reflect') x = self.proj(x) if self.flatten: x = x.flatten(2).transpose(1, 2) x = self.norm(x) return x def modulate(x, shift, scale): if shift is None: shift = torch.zeros_like(scale) return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) def get_2d_sincos_pos_embed( embed_dim, grid_size, cls_token=False, extra_tokens=0, scaling_factor=None, offset=None, ): """ grid_size: int of the grid height and width return: pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token) """ grid_h = np.arange(grid_size, dtype=np.float32) grid_w = np.arange(grid_size, dtype=np.float32) grid = np.meshgrid(grid_w, grid_h) grid = np.stack(grid, axis=0) if scaling_factor is not None: grid = grid / scaling_factor if offset is not None: grid = grid - offset grid = grid.reshape([2, 1, grid_size, grid_size]) pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) if cls_token and extra_tokens > 0: pos_embed = np.concatenate( [np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0 ) return pos_embed def 
get_2d_sincos_pos_embed_from_grid(embed_dim, grid): assert embed_dim % 2 == 0 emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) emb = np.concatenate([emb_h, emb_w], axis=1) return emb def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): """ embed_dim: output dimension for each position pos: a list of positions to be encoded: size (M,) out: (M, D) """ assert embed_dim % 2 == 0 omega = np.arange(embed_dim // 2, dtype=np.float64) omega /= embed_dim / 2.0 omega = 1.0 / 10000**omega pos = pos.reshape(-1) out = np.einsum("m,d->md", pos, omega) emb_sin = np.sin(out) emb_cos = np.cos(out) emb = np.concatenate([emb_sin, emb_cos], axis=1) return emb def get_1d_sincos_pos_embed_from_grid_torch(embed_dim, pos, device=None, dtype=torch.float32): omega = torch.arange(embed_dim // 2, device=device, dtype=dtype) omega /= embed_dim / 2.0 omega = 1.0 / 10000**omega pos = pos.reshape(-1) out = torch.einsum("m,d->md", pos, omega) emb_sin = torch.sin(out) emb_cos = torch.cos(out) emb = torch.cat([emb_sin, emb_cos], dim=1) return emb def get_2d_sincos_pos_embed_torch(embed_dim, w, h, val_center=7.5, val_magnitude=7.5, device=None, dtype=torch.float32): small = min(h, w) val_h = (h / small) * val_magnitude val_w = (w / small) * val_magnitude grid_h, grid_w = torch.meshgrid(torch.linspace(-val_h + val_center, val_h + val_center, h, device=device, dtype=dtype), torch.linspace(-val_w + val_center, val_w + val_center, w, device=device, dtype=dtype), indexing='ij') emb_h = get_1d_sincos_pos_embed_from_grid_torch(embed_dim // 2, grid_h, device=device, dtype=dtype) emb_w = get_1d_sincos_pos_embed_from_grid_torch(embed_dim // 2, grid_w, device=device, dtype=dtype) emb = torch.cat([emb_w, emb_h], dim=1) return emb class TimestepEmbedder(nn.Module): """ Embeds scalar timesteps into vector representations. """ def __init__(self, hidden_size, frequency_embedding_size=256, dtype=None, device=None, operations=None): super().__init__() self.mlp = nn.Sequential( operations.Linear(frequency_embedding_size, hidden_size, bias=True, dtype=dtype, device=device), nn.SiLU(), operations.Linear(hidden_size, hidden_size, bias=True, dtype=dtype, device=device), ) self.frequency_embedding_size = frequency_embedding_size @staticmethod def timestep_embedding(t, dim, max_period=10000): """ Create sinusoidal timestep embeddings. :param t: a 1-D Tensor of N indices, one per batch element. These may be fractional. :param dim: the dimension of the output. :param max_period: controls the minimum frequency of the embeddings. :return: an (N, D) Tensor of positional embeddings.
""" half = dim freqs = torch.exp( -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32, device=t.device) / half ) args = t[:, None].float() * freqs[None] embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) if dim % 2: embedding = torch.cat( [embedding, torch.zeros_like(embedding[:, :1])], dim=-1 ) if torch.is_floating_point(t): embedding = embedding.to(dtype=t.dtype) return embedding def forward(self, t, dtype, **kwargs): t_freq = self.timestep_embedding(t, self.frequency_embedding_size).to(dtype) t_emb = self.mlp(t_freq) return t_emb class VectorEmbedder(nn.Module): """ Embeds a flat vector of dimension input_dim """ def __init__(self, input_dim: int, hidden_size: int, dtype=None, device=None, operations=None): super().__init__() self.mlp = nn.Sequential( operations.Linear(input_dim, hidden_size, bias=True, dtype=dtype, device=device), nn.SiLU(), operations.Linear(hidden_size, hidden_size, bias=True, dtype=dtype, device=device), ) def forward(self, x: torch.Tensor) -> torch.Tensor: emb = self.mlp(x) return emb def split_qkv(qkv, head_dim): qkv = qkv.reshape(qkv.shape[0], qkv.shape[1], 3, -1, head_dim).movedim(2, 0) return qkv[0], qkv[1], qkv[2] def optimized_attention(qkv, num_heads): return attention.optimized_attention(qkv[0], qkv[1], qkv[2], num_heads) class SelfAttention(nn.Module): ATTENTION_MODES = ("xformers", "torch", "torch-hb", "math", "debug") def __init__( self, dim: int, num_heads: int = 8, qkv_bias: bool = False, qk_scale: Optional[float] = None, proj_drop: float = 0.0, attn_mode: str = "xformers", pre_only: bool = False, qk_norm: Optional[str] = None, rmsnorm: bool = False, dtype=None, device=None, operations=None, ): super().__init__() self.num_heads = num_heads self.head_dim = dim self.qkv = operations.Linear(dim, dim * 3, bias=qkv_bias, dtype=dtype, device=device) if not pre_only: self.proj = operations.Linear(dim, dim, dtype=dtype, device=device) self.proj_drop = nn.Dropout(proj_drop) assert attn_mode in self.ATTENTION_MODES self.attn_mode = attn_mode self.pre_only = pre_only if qk_norm == "rms": self.ln_q = RMSNorm(self.head_dim, elementwise_affine=True, eps=1.0e-6, dtype=dtype, device=device) self.ln_k = RMSNorm(self.head_dim, elementwise_affine=True, eps=1.0e-6, dtype=dtype, device=device) elif qk_norm == "ln": self.ln_q = operations.LayerNorm(self.head_dim, elementwise_affine=True, eps=1.0e-6, dtype=dtype, device=device) self.ln_k = operations.LayerNorm(self.head_dim, elementwise_affine=True, eps=1.0e-6, dtype=dtype, device=device) elif qk_norm is None: self.ln_q = nn.Identity() self.ln_k = nn.Identity() else: raise ValueError(qk_norm) def pre_attention(self, x: torch.Tensor) -> torch.Tensor: B, L, C = x.shape qkv = self.qkv(x) q, k, v = split_qkv(qkv, self.head_dim) q = self.ln_q(q).reshape(q.shape[0], q.shape[1], -1) k = self.ln_k(k).reshape(q.shape[0], q.shape[1], -1) return (q, k, v) def post_attention(self, x: torch.Tensor) -> torch.Tensor: assert not self.pre_only x = self.proj(x) x = self.proj_drop(x) return x def forward(self, x: torch.Tensor) -> torch.Tensor: qkv = self.pre_attention(x) x = optimized_attention( qkv, num_heads=self.num_heads ) x = self.post_attention(x) return x class RMSNorm(torch.nn.Module): def __init__( self, dim: int, elementwise_affine: bool = False, eps: float = 1e-6, device=None, dtype=None ): """ Initialize the RMSNorm normalization layer. Args: dim (int): The dimension of the input tensor. eps (float, optional): A small value added to the denominator for numerical stability. Default is 1e-6. 
Attributes: eps (float): A small value added to the denominator for numerical stability. weight (nn.Parameter): Learnable scaling parameter. """ super().__init__() self.eps = eps self.learnable_scale = elementwise_affine if self.learnable_scale: self.weight = nn.Parameter(torch.empty(dim, device=device, dtype=dtype)) else: self.register_parameter("weight", None) def _norm(self, x): """ Apply the RMSNorm normalization to the input tensor. Args: x (torch.Tensor): The input tensor. Returns: torch.Tensor: The normalized tensor. """ return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps) def forward(self, x): """ Forward pass through the RMSNorm layer. Args: x (torch.Tensor): The input tensor. Returns: torch.Tensor: The output tensor after applying RMSNorm. """ x = self._norm(x) if self.learnable_scale: return x * self.weight.to(device=x.device, dtype=x.dtype) else: return x class SwiGLUFeedForward(nn.Module): def __init__( self, dim: int, hidden_dim: int, multiple_of: int, ffn_dim_multiplier: Optional[float] = None, ): """ Initialize the FeedForward module. Args: dim (int): Input dimension. hidden_dim (int): Hidden dimension of the feedforward layer. multiple_of (int): Value to ensure hidden dimension is a multiple of this value. ffn_dim_multiplier (float, optional): Custom multiplier for hidden dimension. Defaults to None. Attributes: w1 (ColumnParallelLinear): Linear transformation for the first layer. w2 (RowParallelLinear): Linear transformation for the second layer. w3 (ColumnParallelLinear): Linear transformation for the third layer. """ super().__init__() hidden_dim = int(2 * hidden_dim / 3) if ffn_dim_multiplier is not None: hidden_dim = int(ffn_dim_multiplier * hidden_dim) hidden_dim = multiple_of * ((hidden_dim + multiple_of - 1) // multiple_of) self.w1 = nn.Linear(dim, hidden_dim, bias=False) self.w2 = nn.Linear(hidden_dim, dim, bias=False) self.w3 = nn.Linear(dim, hidden_dim, bias=False) def forward(self, x): return self.w2(nn.functional.silu(self.w1(x)) * self.w3(x)) class DismantledBlock(nn.Module): """ A DiT block with gated adaptive layer norm (adaLN) conditioning.
""" ATTENTION_MODES = ("xformers", "torch", "torch-hb", "math", "debug") def __init__( self, hidden_size: int, num_heads: int, mlp_ratio: float = 4.0, attn_mode: str = "xformers", qkv_bias: bool = False, pre_only: bool = False, rmsnorm: bool = False, scale_mod_only: bool = False, swiglu: bool = False, qk_norm: Optional[str] = None, dtype=None, device=None, operations=None, **block_kwargs, ): super().__init__() assert attn_mode in self.ATTENTION_MODES if not rmsnorm: self.norm1 = operations.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) else: self.norm1 = RMSNorm(hidden_size, elementwise_affine=False, eps=1e-6) self.attn = SelfAttention( dim=hidden_size, num_heads=num_heads, qkv_bias=qkv_bias, attn_mode=attn_mode, pre_only=pre_only, qk_norm=qk_norm, rmsnorm=rmsnorm, dtype=dtype, device=device, operations=operations ) if not pre_only: if not rmsnorm: self.norm2 = operations.LayerNorm( hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device ) else: self.norm2 = RMSNorm(hidden_size, elementwise_affine=False, eps=1e-6) mlp_hidden_dim = int(hidden_size * mlp_ratio) if not pre_only: if not swiglu: self.mlp = Mlp( in_features=hidden_size, hidden_features=mlp_hidden_dim, act_layer=lambda: nn.GELU(approximate="tanh"), drop=0, dtype=dtype, device=device, operations=operations ) else: self.mlp = SwiGLUFeedForward( dim=hidden_size, hidden_dim=mlp_hidden_dim, multiple_of=256, ) self.scale_mod_only = scale_mod_only if not scale_mod_only: n_mods = 6 if not pre_only else 2 else: n_mods = 4 if not pre_only else 1 self.adaLN_modulation = nn.Sequential( nn.SiLU(), operations.Linear(hidden_size, n_mods * hidden_size, bias=True, dtype=dtype, device=device) ) self.pre_only = pre_only def pre_attention(self, x: torch.Tensor, c: torch.Tensor) -> torch.Tensor: if not self.pre_only: if not self.scale_mod_only: ( shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp, ) = self.adaLN_modulation(c).chunk(6, dim=1) else: shift_msa = None shift_mlp = None ( scale_msa, gate_msa, scale_mlp, gate_mlp, ) = self.adaLN_modulation( c ).chunk(4, dim=1) qkv = self.attn.pre_attention(modulate(self.norm1(x), shift_msa, scale_msa)) return qkv, ( x, gate_msa, shift_mlp, scale_mlp, gate_mlp, ) else: if not self.scale_mod_only: ( shift_msa, scale_msa, ) = self.adaLN_modulation( c ).chunk(2, dim=1) else: shift_msa = None scale_msa = self.adaLN_modulation(c) qkv = self.attn.pre_attention(modulate(self.norm1(x), shift_msa, scale_msa)) return qkv, None def post_attention(self, attn, x, gate_msa, shift_mlp, scale_mlp, gate_mlp): assert not self.pre_only x = x + gate_msa.unsqueeze(1) * self.attn.post_attention(attn) x = x + gate_mlp.unsqueeze(1) * self.mlp( modulate(self.norm2(x), shift_mlp, scale_mlp) ) return x def forward(self, x: torch.Tensor, c: torch.Tensor) -> torch.Tensor: assert not self.pre_only qkv, intermediates = self.pre_attention(x, c) attn = optimized_attention( qkv, num_heads=self.attn.num_heads, ) return self.post_attention(attn, *intermediates) def block_mixing(*args, use_checkpoint=True, **kwargs): if use_checkpoint: return torch.utils.checkpoint.checkpoint( _block_mixing, *args, use_reentrant=False, **kwargs ) else: return _block_mixing(*args, **kwargs) def _block_mixing(context, x, context_block, x_block, c): context_qkv, context_intermediates = context_block.pre_attention(context, c) x_qkv, x_intermediates = x_block.pre_attention(x, c) o = [] for t in range(3): o.append(torch.cat((context_qkv[t], x_qkv[t]), dim=1)) qkv = tuple(o) attn = 
optimized_attention( qkv, num_heads=x_block.attn.num_heads, ) context_attn, x_attn = ( attn[:, : context_qkv[0].shape[1]], attn[:, context_qkv[0].shape[1] :], ) if not context_block.pre_only: context = context_block.post_attention(context_attn, *context_intermediates) else: context = None x = x_block.post_attention(x_attn, *x_intermediates) return context, x class JointBlock(nn.Module): """just a small wrapper to serve as a fsdp unit""" def __init__( self, *args, **kwargs, ): super().__init__() pre_only = kwargs.pop("pre_only") qk_norm = kwargs.pop("qk_norm", None) self.context_block = DismantledBlock(*args, pre_only=pre_only, qk_norm=qk_norm, **kwargs) self.x_block = DismantledBlock(*args, pre_only=False, qk_norm=qk_norm, **kwargs) def forward(self, *args, **kwargs): return block_mixing( *args, context_block=self.context_block, x_block=self.x_block, **kwargs ) class FinalLayer(nn.Module): """ The final layer of DiT. """ def __init__( self, hidden_size: int, patch_size: int, out_channels: int, total_out_channels: Optional[int] = None, dtype=None, device=None, operations=None, ): super().__init__() self.norm_final = operations.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) self.linear = ( operations.Linear(hidden_size, patch_size * patch_size * out_channels, bias=True, dtype=dtype, device=device) if (total_out_channels is None) else operations.Linear(hidden_size, total_out_channels, bias=True, dtype=dtype, device=device) ) self.adaLN_modulation = nn.Sequential( nn.SiLU(), operations.Linear(hidden_size, 2 * hidden_size, bias=True, dtype=dtype, device=device) ) def forward(self, x: torch.Tensor, c: torch.Tensor) -> torch.Tensor: shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) x = modulate(self.norm_final(x), shift, scale) x = self.linear(x) return x class SelfAttentionContext(nn.Module): def __init__(self, dim, heads=8, dim_head=64, dtype=None, device=None, operations=None): super().__init__() dim_head = dim inner_dim = dim self.heads = heads self.dim_head = dim_head self.qkv = operations.Linear(dim, dim * 3, bias=True, dtype=dtype, device=device) self.proj = operations.Linear(inner_dim, dim, dtype=dtype, device=device) def forward(self, x): qkv = self.qkv(x) q, k, v = split_qkv(qkv, self.dim_head) x = optimized_attention((q.reshape(q.shape[0], q.shape[1], -1), k, v), self.heads) return self.proj(x) class ContextProcessorBlock(nn.Module): def __init__(self, context_size, dtype=None, device=None, operations=None): super().__init__() self.norm1 = operations.LayerNorm(context_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) self.attn = SelfAttentionContext(context_size, dtype=dtype, device=device, operations=operations) self.norm2 = operations.LayerNorm(context_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) self.mlp = Mlp(in_features=context_size, hidden_features=(context_size * 4), act_layer=lambda: nn.GELU(approximate="tanh"), drop=0, dtype=dtype, device=device, operations=operations) def forward(self, x): x += self.attn(self.norm1(x)) x += self.mlp(self.norm2(x)) return x class ContextProcessor(nn.Module): def __init__(self, context_size, num_layers, dtype=None, device=None, operations=None): super().__init__() self.layers = torch.nn.ModuleList([ContextProcessorBlock(context_size, dtype=dtype, device=device, operations=operations) for i in range(num_layers)]) self.norm = operations.LayerNorm(context_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) def forward(self, x): for i, l in 
enumerate(self.layers): x = l(x) return self.norm(x) class MMDiT(nn.Module): """ Diffusion model with a Transformer backbone. """ def __init__( self, input_size: int = 32, patch_size: int = 2, in_channels: int = 4, depth: int = 28, mlp_ratio: float = 4.0, learn_sigma: bool = False, adm_in_channels: Optional[int] = None, context_embedder_config: Optional[Dict] = None, compile_core: bool = False, use_checkpoint: bool = False, register_length: int = 0, attn_mode: str = "torch", rmsnorm: bool = False, scale_mod_only: bool = False, swiglu: bool = False, out_channels: Optional[int] = None, pos_embed_scaling_factor: Optional[float] = None, pos_embed_offset: Optional[float] = None, pos_embed_max_size: Optional[int] = None, num_patches = None, qk_norm: Optional[str] = None, qkv_bias: bool = True, context_processor_layers = None, context_size = 4096, dtype = None, device = None, operations = None, ): super().__init__() self.dtype = dtype self.learn_sigma = learn_sigma self.in_channels = in_channels default_out_channels = in_channels * 2 if learn_sigma else in_channels self.out_channels = default(out_channels, default_out_channels) self.patch_size = patch_size self.pos_embed_scaling_factor = pos_embed_scaling_factor self.pos_embed_offset = pos_embed_offset self.pos_embed_max_size = pos_embed_max_size self.hidden_size = 64 * depth num_heads = depth self.num_heads = num_heads self.x_embedder = PatchEmbed( input_size, patch_size, in_channels, self.hidden_size, bias=True, strict_img_size=self.pos_embed_max_size is None, dtype=dtype, device=device, operations=operations ) self.t_embedder = TimestepEmbedder(self.hidden_size, dtype=dtype, device=device, operations=operations) self.y_embedder = None if adm_in_channels is not None: assert isinstance(adm_in_channels, int) self.y_embedder = VectorEmbedder(adm_in_channels, self.hidden_size, dtype=dtype, device=device, operations=operations) if context_processor_layers is not None: self.context_processor = ContextProcessor(context_size, context_processor_layers, dtype=dtype, device=device, operations=operations) else: self.context_processor = None self.context_embedder = nn.Identity() if context_embedder_config is not None: if context_embedder_config["target"] == "torch.nn.Linear": self.context_embedder = operations.Linear(**context_embedder_config["params"], dtype=dtype, device=device) self.register_length = register_length if self.register_length > 0: self.register = nn.Parameter(torch.randn(1, register_length, self.hidden_size, dtype=dtype, device=device)) if num_patches is not None: self.register_buffer( "pos_embed", torch.empty(1, num_patches, self.hidden_size, dtype=dtype, device=device), ) else: self.pos_embed = None self.use_checkpoint = use_checkpoint self.joint_blocks = nn.ModuleList( [ JointBlock( self.hidden_size, num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, attn_mode=attn_mode, pre_only=i == depth - 1, rmsnorm=rmsnorm, scale_mod_only=scale_mod_only, swiglu=swiglu, qk_norm=qk_norm, dtype=dtype, device=device, operations=operations ) for i in range(depth) ] ) self.final_layer = FinalLayer(self.hidden_size, patch_size, self.out_channels, dtype=dtype, device=device, operations=operations) if compile_core: assert False self.forward_core_with_concat = torch.compile(self.forward_core_with_concat) def cropped_pos_embed(self, hw, device=None): p = self.x_embedder.patch_size[0] h, w = hw h = (h + 1) w = (w + 1) if self.pos_embed is None: return get_2d_sincos_pos_embed_torch(self.hidden_size, w, h, device=device) assert self.pos_embed_max_size is not None 
assert h <= self.pos_embed_max_size, (h, self.pos_embed_max_size) assert w <= self.pos_embed_max_size, (w, self.pos_embed_max_size) top = (self.pos_embed_max_size - h) // 2 left = (self.pos_embed_max_size - w) // 2 spatial_pos_embed = rearrange( self.pos_embed, "1 (h w) c -> 1 h w c", h=self.pos_embed_max_size, w=self.pos_embed_max_size, ) spatial_pos_embed = spatial_pos_embed[:, top : top + h, left : left + w, :] spatial_pos_embed = rearrange(spatial_pos_embed, "1 h w c -> 1 (h w) c") return spatial_pos_embed def unpatchify(self, x, hw=None): """ x: (N, T, patch_size**2 * C) imgs: (N, H, W, C) """ c = self.out_channels p = self.x_embedder.patch_size[0] if hw is None: h = w = int(x.shape[1] ** 0.5) else: h, w = hw h = (h + 1) // p w = (w + 1) // p assert h * w == x.shape[1] x = x.reshape(shape=(x.shape[0], h, w, p, p, c)) x = torch.einsum("nhwpqc->nchpwq", x) imgs = x.reshape(shape=(x.shape[0], c, h * p, w * p)) return imgs def forward_core_with_concat( self, x: torch.Tensor, c_mod: torch.Tensor, context: Optional[torch.Tensor] = None, ) -> torch.Tensor: if self.register_length > 0: context = torch.cat( ( repeat(self.register, "1 ... -> b ...", b=x.shape[0]), default(context, torch.Tensor([]).type_as(x)), ), 1, ) for block in self.joint_blocks: context, x = block( context, x, c=c_mod, use_checkpoint=self.use_checkpoint, ) x = self.final_layer(x, c_mod) return x def forward( self, x: torch.Tensor, t: torch.Tensor, y: Optional[torch.Tensor] = None, context: Optional[torch.Tensor] = None, ) -> torch.Tensor: """ Forward pass of DiT. x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) t: (N,) tensor of diffusion timesteps y: (N,) tensor of class labels """ if self.context_processor is not None: context = self.context_processor(context) hw = x.shape[-2:] x = self.x_embedder(x) + self.cropped_pos_embed(hw, device=x.device).to(dtype=x.dtype, device=x.device) c = self.t_embedder(t, dtype=x.dtype) if y is not None and self.y_embedder is not None: y = self.y_embedder(y) c = c + y if context is not None: context = self.context_embedder(context) x = self.forward_core_with_concat(x, c, context) x = self.unpatchify(x, hw=hw) return x[:,:,:hw[-2],:hw[-1]] class OpenAISignatureMMDITWrapper(MMDiT): def forward( self, x: torch.Tensor, timesteps: torch.Tensor, context: Optional[torch.Tensor] = None, y: Optional[torch.Tensor] = None, **kwargs, ) -> torch.Tensor: return super().forward(x, timesteps, context=context, y=y)
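# Illustrative sketch, not from the original file: shape bookkeeping for the MMDiT
# patching path above, reproduced with plain tensor ops so it runs on its own.
# An (N, C, H, W) latent is cut into p x p patches, handled as (N, H/p * W/p, p*p*C)
# tokens, and MMDiT.unpatchify() reverses the cut. All sizes are made up.
import torch

def _demo_patch_roundtrip(N=1, C=4, H=32, W=32, p=2):
    x = torch.randn(N, C, H, W)
    h, w = H // p, W // p
    # patchify: (N, C, H, W) -> (N, h*w, p*p*C)
    patches = x.reshape(N, C, h, p, w, p).permute(0, 2, 4, 3, 5, 1).reshape(N, h * w, p * p * C)
    # unpatchify, mirroring MMDiT.unpatchify: (N, T, p*p*C) -> (N, C, H, W)
    y = patches.reshape(N, h, w, p, p, C)
    y = torch.einsum("nhwpqc->nchpwq", y).reshape(N, C, h * p, w * p)
    assert torch.equal(x, y)  # the roundtrip only rearranges elements, so it is exact
    return y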
import math import torch import torch.nn as nn import numpy as np from typing import Optional, Any import logging from comfy import model_management import comfy.ops ops = comfy.ops.disable_weight_init if model_management.xformers_enabled_vae(): import xformers import xformers.ops def get_timestep_embedding(timesteps, embedding_dim): """ This matches the implementation in Denoising Diffusion Probabilistic Models: From Fairseq. Build sinusoidal embeddings. This matches the implementation in tensor2tensor, but differs slightly from the description in Section 3.5 of "Attention Is All You Need". """ assert len(timesteps.shape) == 1 half_dim = embedding_dim emb = math.log(10000) / (half_dim - 1) emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb) emb = emb.to(device=timesteps.device) emb = timesteps.float()[:, None] * emb[None, :] emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1) if embedding_dim % 2 == 1: emb = torch.nn.functional.pad(emb, (0,1,0,0)) return emb def nonlinearity(x): return x*torch.sigmoid(x) def Normalize(in_channels, num_groups=32): return ops.GroupNorm(num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True) class Upsample(nn.Module): def __init__(self, in_channels, with_conv): super().__init__() self.with_conv = with_conv if self.with_conv: self.conv = ops.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1) def forward(self, x): try: x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest") except: b, c, h, w = x.shape out = torch.empty((b, c, h*2, w*2), dtype=x.dtype, layout=x.layout, device=x.device) split = 8 l = out.shape[1] for i in range(0, out.shape[1], l): out[:,i:i+l] = torch.nn.functional.interpolate(x[:,i:i+l].to(torch.float32), scale_factor=2.0, mode="nearest").to(x.dtype) del x x = out if self.with_conv: x = self.conv(x) return x class Downsample(nn.Module): def __init__(self, in_channels, with_conv): super().__init__() self.with_conv = with_conv if self.with_conv: self.conv = ops.Conv2d(in_channels, in_channels, kernel_size=3, stride=2, padding=0) def forward(self, x): if self.with_conv: pad = (0,1,0,1) x = torch.nn.functional.pad(x, pad, mode="constant", value=0) x = self.conv(x) else: x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) return x class ResnetBlock(nn.Module): def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False, dropout, temb_channels=512): super().__init__() self.in_channels = in_channels out_channels = in_channels if out_channels is None else out_channels self.out_channels = out_channels self.use_conv_shortcut = conv_shortcut self.swish = torch.nn.SiLU(inplace=True) self.norm1 = Normalize(in_channels) self.conv1 = ops.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1) if temb_channels > 0: self.temb_proj = ops.Linear(temb_channels, out_channels) self.norm2 = Normalize(out_channels) self.dropout = torch.nn.Dropout(dropout, inplace=True) self.conv2 = ops.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1) if self.in_channels != self.out_channels: if self.use_conv_shortcut: self.conv_shortcut = ops.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1) else: self.nin_shortcut = ops.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, padding=0) def forward(self, x, temb): h = x h = self.norm1(h) h = self.swish(h) h = self.conv1(h) if temb is not None: h = h + self.temb_proj(self.swish(temb))[:,:,None,None] h = self.norm2(h) h = self.swish(h) h = self.dropout(h) h = self.conv2(h) if 
self.in_channels != self.out_channels: if self.use_conv_shortcut: x = self.conv_shortcut(x) else: x = self.nin_shortcut(x) return x+h def slice_attention(q, k, v): r1 = torch.zeros_like(k, device=q.device) scale = (int(q.shape[-1])**(-0.5)) mem_free_total = model_management.get_free_memory(q.device) gb = 1024 ** 3 tensor_size = q.shape[0] * q.shape[1] * k.shape[2] * q.element_size() modifier = 3 if q.element_size() == 2 else 2.5 mem_required = tensor_size * modifier steps = 1 if mem_required > mem_free_total: steps = 2**(math.ceil(math.log(mem_required / mem_free_total, 2))) while True: try: slice_size = q.shape[1] for i in range(0, q.shape[1], slice_size): end = i + slice_size s1 = torch.bmm(q[:, i:end], k) * scale s2 = torch.nn.functional.softmax(s1, dim=2).permute(0,2,1) del s1 r1[:, :, i:end] = torch.bmm(v, s2) del s2 break except model_management.OOM_EXCEPTION as e: model_management.soft_empty_cache(True) steps *= 2 if steps > 128: raise e logging.warning("out of memory error, increasing steps and trying again {}".format(steps)) return r1 def normal_attention(q, k, v): b,c,h,w = q.shape q = q.reshape(b,c,h*w) q = q.permute(0,2,1) k = k.reshape(b,c,h*w) v = v.reshape(b,c,h*w) r1 = slice_attention(q, k, v) h_ = r1.reshape(b,c,h,w) del r1 return h_ def xformers_attention(q, k, v): B, C, H, W = q.shape q, k, v = map( lambda t: t.view(B, C, -1).transpose(1, 2).contiguous(), (q, k, v), ) try: out = xformers.ops.memory_efficient_attention(q, k, v, attn_bias=None) out = out.transpose(1, 2).reshape(B, C, H, W) except NotImplementedError as e: out = slice_attention(q.view(B, -1, C), k.view(B, -1, C).transpose(1, 2), v.view(B, -1, C).transpose(1, 2)).reshape(B, C, H, W) return out def pytorch_attention(q, k, v): B, C, H, W = q.shape q, k, v = map( lambda t: t.view(B, 1, C, -1).transpose(2, 3).contiguous(), (q, k, v), ) try: out = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=None, dropout_p=0.0, is_causal=False) out = out.transpose(2, 3).reshape(B, C, H, W) except model_management.OOM_EXCEPTION as e: logging.warning("scaled_dot_product_attention OOMed: switched to slice attention") out = slice_attention(q.view(B, -1, C), k.view(B, -1, C).transpose(1, 2), v.view(B, -1, C).transpose(1, 2)).reshape(B, C, H, W) return out class AttnBlock(nn.Module): def __init__(self, in_channels): super().__init__() self.in_channels = in_channels self.norm = Normalize(in_channels) self.q = ops.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0) self.k = ops.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0) self.v = ops.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0) self.proj_out = ops.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0) if model_management.xformers_enabled_vae(): logging.info("Using xformers attention in VAE") self.optimized_attention = xformers_attention elif model_management.pytorch_attention_enabled(): logging.info("Using pytorch attention in VAE") self.optimized_attention = pytorch_attention else: logging.info("Using split attention in VAE") self.optimized_attention = normal_attention def forward(self, x): h_ = x h_ = self.norm(h_) q = self.q(h_) k = self.k(h_) v = self.v(h_) h_ = self.optimized_attention(q, k, v) h_ = self.proj_out(h_) return x+h_ def make_attn(in_channels, attn_type="vanilla", attn_kwargs=None): return AttnBlock(in_channels) class Model(nn.Module): def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, 
resolution, use_timestep=True, use_linear_attn=False, attn_type="vanilla"): super().__init__() if use_linear_attn: attn_type = "linear" self.ch = ch self.temb_ch = self.ch*4 self.num_resolutions = len(ch_mult) self.num_res_blocks = num_res_blocks self.resolution = resolution self.in_channels = in_channels self.use_timestep = use_timestep if self.use_timestep: self.temb = nn.Module() self.temb.dense = nn.ModuleList([ ops.Linear(self.ch, self.temb_ch), ops.Linear(self.temb_ch, self.temb_ch), ]) self.conv_in = ops.Conv2d(in_channels, self.ch, kernel_size=3, stride=1, padding=1) curr_res = resolution in_ch_mult = (1,)+tuple(ch_mult) self.down = nn.ModuleList() for i_level in range(self.num_resolutions): block = nn.ModuleList() attn = nn.ModuleList() block_in = ch*in_ch_mult[i_level] block_out = ch*ch_mult[i_level] for i_block in range(self.num_res_blocks): block.append(ResnetBlock(in_channels=block_in, out_channels=block_out, temb_channels=self.temb_ch, dropout=dropout)) block_in = block_out if curr_res in attn_resolutions: attn.append(make_attn(block_in, attn_type=attn_type)) down = nn.Module() down.block = block down.attn = attn if i_level != self.num_resolutions-1: down.downsample = Downsample(block_in, resamp_with_conv) curr_res = curr_res self.down.append(down) self.mid = nn.Module() self.mid.block_1 = ResnetBlock(in_channels=block_in, out_channels=block_in, temb_channels=self.temb_ch, dropout=dropout) self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) self.mid.block_2 = ResnetBlock(in_channels=block_in, out_channels=block_in, temb_channels=self.temb_ch, dropout=dropout) self.up = nn.ModuleList() for i_level in reversed(range(self.num_resolutions)): block = nn.ModuleList() attn = nn.ModuleList() block_out = ch*ch_mult[i_level] skip_in = ch*ch_mult[i_level] for i_block in range(self.num_res_blocks+1): if i_block == self.num_res_blocks: skip_in = ch*in_ch_mult[i_level] block.append(ResnetBlock(in_channels=block_in+skip_in, out_channels=block_out, temb_channels=self.temb_ch, dropout=dropout)) block_in = block_out if curr_res in attn_resolutions: attn.append(make_attn(block_in, attn_type=attn_type)) up = nn.Module() up.block = block up.attn = attn if i_level != 0: up.upsample = Upsample(block_in, resamp_with_conv) curr_res = curr_res * 2 self.up.insert(0, up) self.norm_out = Normalize(block_in) self.conv_out = ops.Conv2d(block_in, out_ch, kernel_size=3, stride=1, padding=1) def forward(self, x, t=None, context=None): if context is not None: x = torch.cat((x, context), dim=1) if self.use_timestep: assert t is not None temb = get_timestep_embedding(t, self.ch) temb = self.temb.dense[0](temb) temb = nonlinearity(temb) temb = self.temb.dense[1](temb) else: temb = None hs = [self.conv_in(x)] for i_level in range(self.num_resolutions): for i_block in range(self.num_res_blocks): h = self.down[i_level].block[i_block](hs[-1], temb) if len(self.down[i_level].attn) > 0: h = self.down[i_level].attn[i_block](h) hs.append(h) if i_level != self.num_resolutions-1: hs.append(self.down[i_level].downsample(hs[-1])) h = hs[-1] h = self.mid.block_1(h, temb) h = self.mid.attn_1(h) h = self.mid.block_2(h, temb) for i_level in reversed(range(self.num_resolutions)): for i_block in range(self.num_res_blocks+1): h = self.up[i_level].block[i_block]( torch.cat([h, hs.pop()], dim=1), temb) if len(self.up[i_level].attn) > 0: h = self.up[i_level].attn[i_block](h) if i_level != 0: h = self.up[i_level].upsample(h) h = self.norm_out(h) h = nonlinearity(h) h = self.conv_out(h) return h def get_last_layer(self): return 
self.conv_out.weight class Encoder(nn.Module): def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, resolution, z_channels, double_z=True, use_linear_attn=False, attn_type="vanilla", **ignore_kwargs): super().__init__() if use_linear_attn: attn_type = "linear" self.ch = ch self.temb_ch = 0 self.num_resolutions = len(ch_mult) self.num_res_blocks = num_res_blocks self.resolution = resolution self.in_channels = in_channels self.conv_in = ops.Conv2d(in_channels, self.ch, kernel_size=3, stride=1, padding=1) curr_res = resolution in_ch_mult = (1,)+tuple(ch_mult) self.in_ch_mult = in_ch_mult self.down = nn.ModuleList() for i_level in range(self.num_resolutions): block = nn.ModuleList() attn = nn.ModuleList() block_in = ch*in_ch_mult[i_level] block_out = ch*ch_mult[i_level] for i_block in range(self.num_res_blocks): block.append(ResnetBlock(in_channels=block_in, out_channels=block_out, temb_channels=self.temb_ch, dropout=dropout)) block_in = block_out if curr_res in attn_resolutions: attn.append(make_attn(block_in, attn_type=attn_type)) down = nn.Module() down.block = block down.attn = attn if i_level != self.num_resolutions-1: down.downsample = Downsample(block_in, resamp_with_conv) curr_res = curr_res self.down.append(down) self.mid = nn.Module() self.mid.block_1 = ResnetBlock(in_channels=block_in, out_channels=block_in, temb_channels=self.temb_ch, dropout=dropout) self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) self.mid.block_2 = ResnetBlock(in_channels=block_in, out_channels=block_in, temb_channels=self.temb_ch, dropout=dropout) self.norm_out = Normalize(block_in) self.conv_out = ops.Conv2d(block_in, 2*z_channels if double_z else z_channels, kernel_size=3, stride=1, padding=1) def forward(self, x): temb = None h = self.conv_in(x) for i_level in range(self.num_resolutions): for i_block in range(self.num_res_blocks): h = self.down[i_level].block[i_block](h, temb) if len(self.down[i_level].attn) > 0: h = self.down[i_level].attn[i_block](h) if i_level != self.num_resolutions-1: h = self.down[i_level].downsample(h) h = self.mid.block_1(h, temb) h = self.mid.attn_1(h) h = self.mid.block_2(h, temb) h = self.norm_out(h) h = nonlinearity(h) h = self.conv_out(h) return h class Decoder(nn.Module): def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, resolution, z_channels, give_pre_end=False, tanh_out=False, use_linear_attn=False, conv_out_op=ops.Conv2d, resnet_op=ResnetBlock, attn_op=AttnBlock, **ignorekwargs): super().__init__() if use_linear_attn: attn_type = "linear" self.ch = ch self.temb_ch = 0 self.num_resolutions = len(ch_mult) self.num_res_blocks = num_res_blocks self.resolution = resolution self.in_channels = in_channels self.give_pre_end = give_pre_end self.tanh_out = tanh_out in_ch_mult = (1,)+tuple(ch_mult) block_in = ch*ch_mult[self.num_resolutions-1] curr_res = resolution self.z_shape = (1,z_channels,curr_res,curr_res) logging.debug("Working with z of shape {} = {} dimensions.".format( self.z_shape, np.prod(self.z_shape))) self.conv_in = ops.Conv2d(z_channels, block_in, kernel_size=3, stride=1, padding=1) self.mid = nn.Module() self.mid.block_1 = resnet_op(in_channels=block_in, out_channels=block_in, temb_channels=self.temb_ch, dropout=dropout) self.mid.attn_1 = attn_op(block_in) self.mid.block_2 = resnet_op(in_channels=block_in, out_channels=block_in, temb_channels=self.temb_ch, dropout=dropout) self.up = 
nn.ModuleList() for i_level in reversed(range(self.num_resolutions)): block = nn.ModuleList() attn = nn.ModuleList() block_out = ch*ch_mult[i_level] for i_block in range(self.num_res_blocks+1): block.append(resnet_op(in_channels=block_in, out_channels=block_out, temb_channels=self.temb_ch, dropout=dropout)) block_in = block_out if curr_res in attn_resolutions: attn.append(attn_op(block_in)) up = nn.Module() up.block = block up.attn = attn if i_level != 0: up.upsample = Upsample(block_in, resamp_with_conv) curr_res = curr_res * 2 self.up.insert(0, up) self.norm_out = Normalize(block_in) self.conv_out = conv_out_op(block_in, out_ch, kernel_size=3, stride=1, padding=1) def forward(self, z, **kwargs): self.last_z_shape = z.shape temb = None h = self.conv_in(z) h = self.mid.block_1(h, temb, **kwargs) h = self.mid.attn_1(h, **kwargs) h = self.mid.block_2(h, temb, **kwargs) for i_level in reversed(range(self.num_resolutions)): for i_block in range(self.num_res_blocks+1): h = self.up[i_level].block[i_block](h, temb, **kwargs) if len(self.up[i_level].attn) > 0: h = self.up[i_level].attn[i_block](h, **kwargs) if i_level != 0: h = self.up[i_level].upsample(h) if self.give_pre_end: return h h = self.norm_out(h) h = nonlinearity(h) h = self.conv_out(h, **kwargs) if self.tanh_out: h = torch.tanh(h) return h
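# Illustrative sketch, not from the original file: how slice_attention() earlier in
# this file decides how finely to slice the (tokens x tokens) attention matrix. It
# estimates the matrix's memory footprint, compares it with the free VRAM reported by
# model_management, and rounds the required split up to the next power of two. The
# helper below only reproduces that arithmetic with made-up numbers; it does not
# allocate anything on the GPU.
import math

def _demo_slice_steps(batch=8, tokens=16384, element_size=2, mem_free_total=2 * 1024**3):
    tensor_size = batch * tokens * tokens * element_size  # bytes needed for q @ k
    modifier = 3 if element_size == 2 else 2.5            # safety factor used above (fp16 vs fp32)
    mem_required = tensor_size * modifier
    steps = 1
    if mem_required > mem_free_total:
        steps = 2 ** (math.ceil(math.log(mem_required / mem_free_total, 2)))
    return steps  # 8 for these example numbers (12 GiB required vs 2 GiB free)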
from abc import abstractmethod import torch as th import torch.nn as nn import torch.nn.functional as F from einops import rearrange import logging from .util import ( checkpoint, avg_pool_nd, zero_module, timestep_embedding, AlphaBlender, ) from ..attention import SpatialTransformer, SpatialVideoTransformer, default from comfy.ldm.util import exists import comfy.ops ops = comfy.ops.disable_weight_init class TimestepBlock(nn.Module): """ Any module where forward() takes timestep embeddings as a second argument. """ @abstractmethod def forward(self, x, emb): """ Apply the module to `x` given `emb` timestep embeddings. """ def forward_timestep_embed(ts, x, emb, context=None, transformer_options={}, output_shape=None, time_context=None, num_video_frames=None, image_only_indicator=None): for layer in ts: if isinstance(layer, VideoResBlock): x = layer(x, emb, num_video_frames, image_only_indicator) elif isinstance(layer, TimestepBlock): x = layer(x, emb) elif isinstance(layer, SpatialVideoTransformer): x = layer(x, context, time_context, num_video_frames, image_only_indicator, transformer_options) if "transformer_index" in transformer_options: transformer_options["transformer_index"] += 1 elif isinstance(layer, SpatialTransformer): x = layer(x, context, transformer_options) if "transformer_index" in transformer_options: transformer_options["transformer_index"] += 1 elif isinstance(layer, Upsample): x = layer(x, output_shape=output_shape) else: x = layer(x) return x class TimestepEmbedSequential(nn.Sequential, TimestepBlock): """ A sequential module that passes timestep embeddings to the children that support it as an extra input. """ def forward(self, *args, **kwargs): return forward_timestep_embed(self, *args, **kwargs) class Upsample(nn.Module): """ An upsampling layer with an optional convolution. :param channels: channels in the inputs and outputs. :param use_conv: a bool determining if a convolution is applied. :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then upsampling occurs in the inner-two dimensions. """ def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1, dtype=None, device=None, operations=ops): super().__init__() self.channels = channels self.out_channels = out_channels or channels self.use_conv = use_conv self.dims = dims if use_conv: self.conv = operations.conv_nd(dims, self.channels, self.out_channels, 3, padding=padding, dtype=dtype, device=device) def forward(self, x, output_shape=None): assert x.shape[1] == self.channels if self.dims == 3: shape = [x.shape[2], x.shape[3] * 2, x.shape[4] * 2] if output_shape is not None: shape[1] = output_shape[3] shape[2] = output_shape[4] else: shape = [x.shape[2] * 2, x.shape[3] * 2] if output_shape is not None: shape[0] = output_shape[2] shape[1] = output_shape[3] x = F.interpolate(x, size=shape, mode="nearest") if self.use_conv: x = self.conv(x) return x class Downsample(nn.Module): """ A downsampling layer with an optional convolution. :param channels: channels in the inputs and outputs. :param use_conv: a bool determining if a convolution is applied. :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then downsampling occurs in the inner-two dimensions. 
""" def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1, dtype=None, device=None, operations=ops): super().__init__() self.channels = channels self.out_channels = out_channels or channels self.use_conv = use_conv self.dims = dims stride = 2 if dims != 3 else (1, 2, 2) if use_conv: self.op = operations.conv_nd( dims, self.channels, self.out_channels, 3, stride=stride, padding=padding, dtype=dtype, device=device ) else: assert self.channels == self.out_channels self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride) def forward(self, x): assert x.shape[1] == self.channels return self.op(x) class ResBlock(TimestepBlock): """ A residual block that can optionally change the number of channels. :param channels: the number of input channels. :param emb_channels: the number of timestep embedding channels. :param dropout: the rate of dropout. :param out_channels: if specified, the number of out channels. :param use_conv: if True and out_channels is specified, use a spatial convolution instead of a smaller 1x1 convolution to change the channels in the skip connection. :param dims: determines if the signal is 1D, 2D, or 3D. :param use_checkpoint: if True, use gradient checkpointing on this module. :param up: if True, use this block for upsampling. :param down: if True, use this block for downsampling. """ def __init__( self, channels, emb_channels, dropout, out_channels=None, use_conv=False, use_scale_shift_norm=False, dims=2, use_checkpoint=False, up=False, down=False, kernel_size=3, exchange_temb_dims=False, skip_t_emb=False, dtype=None, device=None, operations=ops ): super().__init__() self.channels = channels self.emb_channels = emb_channels self.dropout = dropout self.out_channels = out_channels or channels self.use_conv = use_conv self.use_checkpoint = use_checkpoint self.use_scale_shift_norm = use_scale_shift_norm self.exchange_temb_dims = exchange_temb_dims if isinstance(kernel_size, list): padding = [k else: padding = kernel_size self.in_layers = nn.Sequential( operations.GroupNorm(32, channels, dtype=dtype, device=device), nn.SiLU(), operations.conv_nd(dims, channels, self.out_channels, kernel_size, padding=padding, dtype=dtype, device=device), ) self.updown = up or down if up: self.h_upd = Upsample(channels, False, dims, dtype=dtype, device=device) self.x_upd = Upsample(channels, False, dims, dtype=dtype, device=device) elif down: self.h_upd = Downsample(channels, False, dims, dtype=dtype, device=device) self.x_upd = Downsample(channels, False, dims, dtype=dtype, device=device) else: self.h_upd = self.x_upd = nn.Identity() self.skip_t_emb = skip_t_emb if self.skip_t_emb: self.emb_layers = None self.exchange_temb_dims = False else: self.emb_layers = nn.Sequential( nn.SiLU(), operations.Linear( emb_channels, 2 * self.out_channels if use_scale_shift_norm else self.out_channels, dtype=dtype, device=device ), ) self.out_layers = nn.Sequential( operations.GroupNorm(32, self.out_channels, dtype=dtype, device=device), nn.SiLU(), nn.Dropout(p=dropout), operations.conv_nd(dims, self.out_channels, self.out_channels, kernel_size, padding=padding, dtype=dtype, device=device) , ) if self.out_channels == channels: self.skip_connection = nn.Identity() elif use_conv: self.skip_connection = operations.conv_nd( dims, channels, self.out_channels, kernel_size, padding=padding, dtype=dtype, device=device ) else: self.skip_connection = operations.conv_nd(dims, channels, self.out_channels, 1, dtype=dtype, device=device) def forward(self, x, emb): """ Apply the block to a Tensor, 
conditioned on a timestep embedding. :param x: an [N x C x ...] Tensor of features. :param emb: an [N x emb_channels] Tensor of timestep embeddings. :return: an [N x C x ...] Tensor of outputs. """ return checkpoint( self._forward, (x, emb), self.parameters(), self.use_checkpoint ) def _forward(self, x, emb): if self.updown: in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1] h = in_rest(x) h = self.h_upd(h) x = self.x_upd(x) h = in_conv(h) else: h = self.in_layers(x) emb_out = None if not self.skip_t_emb: emb_out = self.emb_layers(emb).type(h.dtype) while len(emb_out.shape) < len(h.shape): emb_out = emb_out[..., None] if self.use_scale_shift_norm: out_norm, out_rest = self.out_layers[0], self.out_layers[1:] h = out_norm(h) if emb_out is not None: scale, shift = th.chunk(emb_out, 2, dim=1) h *= (1 + scale) h += shift h = out_rest(h) else: if emb_out is not None: if self.exchange_temb_dims: emb_out = emb_out.movedim(1, 2) h = h + emb_out h = self.out_layers(h) return self.skip_connection(x) + h class VideoResBlock(ResBlock): def __init__( self, channels: int, emb_channels: int, dropout: float, video_kernel_size=3, merge_strategy: str = "fixed", merge_factor: float = 0.5, out_channels=None, use_conv: bool = False, use_scale_shift_norm: bool = False, dims: int = 2, use_checkpoint: bool = False, up: bool = False, down: bool = False, dtype=None, device=None, operations=ops ): super().__init__( channels, emb_channels, dropout, out_channels=out_channels, use_conv=use_conv, use_scale_shift_norm=use_scale_shift_norm, dims=dims, use_checkpoint=use_checkpoint, up=up, down=down, dtype=dtype, device=device, operations=operations ) self.time_stack = ResBlock( default(out_channels, channels), emb_channels, dropout=dropout, dims=3, out_channels=default(out_channels, channels), use_scale_shift_norm=False, use_conv=False, up=False, down=False, kernel_size=video_kernel_size, use_checkpoint=use_checkpoint, exchange_temb_dims=True, dtype=dtype, device=device, operations=operations ) self.time_mixer = AlphaBlender( alpha=merge_factor, merge_strategy=merge_strategy, rearrange_pattern="b t -> b 1 t 1 1", ) def forward( self, x: th.Tensor, emb: th.Tensor, num_video_frames: int, image_only_indicator = None, ) -> th.Tensor: x = super().forward(x, emb) x_mix = rearrange(x, "(b t) c h w -> b c t h w", t=num_video_frames) x = rearrange(x, "(b t) c h w -> b c t h w", t=num_video_frames) x = self.time_stack( x, rearrange(emb, "(b t) ... -> b t ...", t=num_video_frames) ) x = self.time_mixer( x_spatial=x_mix, x_temporal=x, image_only_indicator=image_only_indicator ) x = rearrange(x, "b c t h w -> (b t) c h w") return x class Timestep(nn.Module): def __init__(self, dim): super().__init__() self.dim = dim def forward(self, t): return timestep_embedding(t, self.dim) def apply_control(h, control, name): if control is not None and name in control and len(control[name]) > 0: ctrl = control[name].pop() if ctrl is not None: try: h += ctrl except: logging.warning("warning control could not be applied {} {}".format(h.shape, ctrl.shape)) return h class UNetModel(nn.Module): """ The full UNet model with attention and timestep embedding. :param in_channels: channels in the input Tensor. :param model_channels: base channel count for the model. :param out_channels: channels in the output Tensor. :param num_res_blocks: number of residual blocks per downsample. :param dropout: the dropout probability. :param channel_mult: channel multiplier for each level of the UNet. 
:param conv_resample: if True, use learned convolutions for upsampling and downsampling. :param dims: determines if the signal is 1D, 2D, or 3D. :param num_classes: if specified (as an int), then this model will be class-conditional with `num_classes` classes. :param use_checkpoint: use gradient checkpointing to reduce memory usage. :param num_heads: the number of attention heads in each attention layer. :param num_heads_channels: if specified, ignore num_heads and instead use a fixed channel width per attention head. :param num_heads_upsample: works with num_heads to set a different number of heads for upsampling. Deprecated. :param use_scale_shift_norm: use a FiLM-like conditioning mechanism. :param resblock_updown: use residual blocks for up/downsampling. :param use_new_attention_order: use a different attention pattern for potentially increased efficiency. """ def __init__( self, image_size, in_channels, model_channels, out_channels, num_res_blocks, dropout=0, channel_mult=(1, 2, 4, 8), conv_resample=True, dims=2, num_classes=None, use_checkpoint=False, dtype=th.float32, num_heads=-1, num_head_channels=-1, num_heads_upsample=-1, use_scale_shift_norm=False, resblock_updown=False, use_new_attention_order=False, use_spatial_transformer=False, transformer_depth=1, context_dim=None, n_embed=None, legacy=True, disable_self_attentions=None, num_attention_blocks=None, disable_middle_self_attn=False, use_linear_in_transformer=False, adm_in_channels=None, transformer_depth_middle=None, transformer_depth_output=None, use_temporal_resblock=False, use_temporal_attention=False, time_context_dim=None, extra_ff_mix_layer=False, use_spatial_context=False, merge_strategy=None, merge_factor=0.0, video_kernel_size=None, disable_temporal_crossattention=False, max_ddpm_temb_period=10000, attn_precision=None, device=None, operations=ops, ): super().__init__() if context_dim is not None: assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...' 
if num_heads_upsample == -1: num_heads_upsample = num_heads if num_heads == -1: assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set' if num_head_channels == -1: assert num_heads != -1, 'Either num_heads or num_head_channels has to be set' self.in_channels = in_channels self.model_channels = model_channels self.out_channels = out_channels if isinstance(num_res_blocks, int): self.num_res_blocks = len(channel_mult) * [num_res_blocks] else: if len(num_res_blocks) != len(channel_mult): raise ValueError("provide num_res_blocks either as an int (globally constant) or " "as a list/tuple (per-level) with the same length as channel_mult") self.num_res_blocks = num_res_blocks if disable_self_attentions is not None: assert len(disable_self_attentions) == len(channel_mult) if num_attention_blocks is not None: assert len(num_attention_blocks) == len(self.num_res_blocks) transformer_depth = transformer_depth[:] transformer_depth_output = transformer_depth_output[:] self.dropout = dropout self.channel_mult = channel_mult self.conv_resample = conv_resample self.num_classes = num_classes self.use_checkpoint = use_checkpoint self.dtype = dtype self.num_heads = num_heads self.num_head_channels = num_head_channels self.num_heads_upsample = num_heads_upsample self.use_temporal_resblocks = use_temporal_resblock self.predict_codebook_ids = n_embed is not None self.default_num_video_frames = None time_embed_dim = model_channels * 4 self.time_embed = nn.Sequential( operations.Linear(model_channels, time_embed_dim, dtype=self.dtype, device=device), nn.SiLU(), operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), ) if self.num_classes is not None: if isinstance(self.num_classes, int): self.label_emb = nn.Embedding(num_classes, time_embed_dim, dtype=self.dtype, device=device) elif self.num_classes == "continuous": logging.debug("setting up linear c_adm embedding layer") self.label_emb = nn.Linear(1, time_embed_dim) elif self.num_classes == "sequential": assert adm_in_channels is not None self.label_emb = nn.Sequential( nn.Sequential( operations.Linear(adm_in_channels, time_embed_dim, dtype=self.dtype, device=device), nn.SiLU(), operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), ) ) else: raise ValueError() self.input_blocks = nn.ModuleList( [ TimestepEmbedSequential( operations.conv_nd(dims, in_channels, model_channels, 3, padding=1, dtype=self.dtype, device=device) ) ] ) self._feature_size = model_channels input_block_chans = [model_channels] ch = model_channels ds = 1 def get_attention_layer( ch, num_heads, dim_head, depth=1, context_dim=None, use_checkpoint=False, disable_self_attn=False, ): if use_temporal_attention: return SpatialVideoTransformer( ch, num_heads, dim_head, depth=depth, context_dim=context_dim, time_context_dim=time_context_dim, dropout=dropout, ff_in=extra_ff_mix_layer, use_spatial_context=use_spatial_context, merge_strategy=merge_strategy, merge_factor=merge_factor, checkpoint=use_checkpoint, use_linear=use_linear_in_transformer, disable_self_attn=disable_self_attn, disable_temporal_crossattention=disable_temporal_crossattention, max_time_embed_period=max_ddpm_temb_period, attn_precision=attn_precision, dtype=self.dtype, device=device, operations=operations ) else: return SpatialTransformer( ch, num_heads, dim_head, depth=depth, context_dim=context_dim, disable_self_attn=disable_self_attn, use_linear=use_linear_in_transformer, use_checkpoint=use_checkpoint, attn_precision=attn_precision, 
dtype=self.dtype, device=device, operations=operations ) def get_resblock( merge_factor, merge_strategy, video_kernel_size, ch, time_embed_dim, dropout, out_channels, dims, use_checkpoint, use_scale_shift_norm, down=False, up=False, dtype=None, device=None, operations=ops ): if self.use_temporal_resblocks: return VideoResBlock( merge_factor=merge_factor, merge_strategy=merge_strategy, video_kernel_size=video_kernel_size, channels=ch, emb_channels=time_embed_dim, dropout=dropout, out_channels=out_channels, dims=dims, use_checkpoint=use_checkpoint, use_scale_shift_norm=use_scale_shift_norm, down=down, up=up, dtype=dtype, device=device, operations=operations ) else: return ResBlock( channels=ch, emb_channels=time_embed_dim, dropout=dropout, out_channels=out_channels, use_checkpoint=use_checkpoint, dims=dims, use_scale_shift_norm=use_scale_shift_norm, down=down, up=up, dtype=dtype, device=device, operations=operations ) for level, mult in enumerate(channel_mult): for nr in range(self.num_res_blocks[level]): layers = [ get_resblock( merge_factor=merge_factor, merge_strategy=merge_strategy, video_kernel_size=video_kernel_size, ch=ch, time_embed_dim=time_embed_dim, dropout=dropout, out_channels=mult * model_channels, dims=dims, use_checkpoint=use_checkpoint, use_scale_shift_norm=use_scale_shift_norm, dtype=self.dtype, device=device, operations=operations, ) ] ch = mult * model_channels num_transformers = transformer_depth.pop(0) if num_transformers > 0: if num_head_channels == -1: dim_head = ch else: num_heads = ch dim_head = num_head_channels if legacy: dim_head = ch if exists(disable_self_attentions): disabled_sa = disable_self_attentions[level] else: disabled_sa = False if not exists(num_attention_blocks) or nr < num_attention_blocks[level]: layers.append(get_attention_layer( ch, num_heads, dim_head, depth=num_transformers, context_dim=context_dim, disable_self_attn=disabled_sa, use_checkpoint=use_checkpoint) ) self.input_blocks.append(TimestepEmbedSequential(*layers)) self._feature_size += ch input_block_chans.append(ch) if level != len(channel_mult) - 1: out_ch = ch self.input_blocks.append( TimestepEmbedSequential( get_resblock( merge_factor=merge_factor, merge_strategy=merge_strategy, video_kernel_size=video_kernel_size, ch=ch, time_embed_dim=time_embed_dim, dropout=dropout, out_channels=out_ch, dims=dims, use_checkpoint=use_checkpoint, use_scale_shift_norm=use_scale_shift_norm, down=True, dtype=self.dtype, device=device, operations=operations ) if resblock_updown else Downsample( ch, conv_resample, dims=dims, out_channels=out_ch, dtype=self.dtype, device=device, operations=operations ) ) ) ch = out_ch input_block_chans.append(ch) ds *= 2 self._feature_size += ch if num_head_channels == -1: dim_head = ch else: num_heads = ch dim_head = num_head_channels if legacy: dim_head = ch mid_block = [ get_resblock( merge_factor=merge_factor, merge_strategy=merge_strategy, video_kernel_size=video_kernel_size, ch=ch, time_embed_dim=time_embed_dim, dropout=dropout, out_channels=None, dims=dims, use_checkpoint=use_checkpoint, use_scale_shift_norm=use_scale_shift_norm, dtype=self.dtype, device=device, operations=operations )] self.middle_block = None if transformer_depth_middle >= -1: if transformer_depth_middle >= 0: mid_block += [get_attention_layer( ch, num_heads, dim_head, depth=transformer_depth_middle, context_dim=context_dim, disable_self_attn=disable_middle_self_attn, use_checkpoint=use_checkpoint ), get_resblock( merge_factor=merge_factor, merge_strategy=merge_strategy, 
video_kernel_size=video_kernel_size, ch=ch, time_embed_dim=time_embed_dim, dropout=dropout, out_channels=None, dims=dims, use_checkpoint=use_checkpoint, use_scale_shift_norm=use_scale_shift_norm, dtype=self.dtype, device=device, operations=operations )] self.middle_block = TimestepEmbedSequential(*mid_block) self._feature_size += ch self.output_blocks = nn.ModuleList([]) for level, mult in list(enumerate(channel_mult))[::-1]: for i in range(self.num_res_blocks[level] + 1): ich = input_block_chans.pop() layers = [ get_resblock( merge_factor=merge_factor, merge_strategy=merge_strategy, video_kernel_size=video_kernel_size, ch=ch + ich, time_embed_dim=time_embed_dim, dropout=dropout, out_channels=model_channels * mult, dims=dims, use_checkpoint=use_checkpoint, use_scale_shift_norm=use_scale_shift_norm, dtype=self.dtype, device=device, operations=operations ) ] ch = model_channels * mult num_transformers = transformer_depth_output.pop() if num_transformers > 0: if num_head_channels == -1: dim_head = ch else: num_heads = ch dim_head = num_head_channels if legacy: dim_head = ch if exists(disable_self_attentions): disabled_sa = disable_self_attentions[level] else: disabled_sa = False if not exists(num_attention_blocks) or i < num_attention_blocks[level]: layers.append( get_attention_layer( ch, num_heads, dim_head, depth=num_transformers, context_dim=context_dim, disable_self_attn=disabled_sa, use_checkpoint=use_checkpoint ) ) if level and i == self.num_res_blocks[level]: out_ch = ch layers.append( get_resblock( merge_factor=merge_factor, merge_strategy=merge_strategy, video_kernel_size=video_kernel_size, ch=ch, time_embed_dim=time_embed_dim, dropout=dropout, out_channels=out_ch, dims=dims, use_checkpoint=use_checkpoint, use_scale_shift_norm=use_scale_shift_norm, up=True, dtype=self.dtype, device=device, operations=operations ) if resblock_updown else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch, dtype=self.dtype, device=device, operations=operations) ) ds self.output_blocks.append(TimestepEmbedSequential(*layers)) self._feature_size += ch self.out = nn.Sequential( operations.GroupNorm(32, ch, dtype=self.dtype, device=device), nn.SiLU(), zero_module(operations.conv_nd(dims, model_channels, out_channels, 3, padding=1, dtype=self.dtype, device=device)), ) if self.predict_codebook_ids: self.id_predictor = nn.Sequential( operations.GroupNorm(32, ch, dtype=self.dtype, device=device), operations.conv_nd(dims, model_channels, n_embed, 1, dtype=self.dtype, device=device), ) def forward(self, x, timesteps=None, context=None, y=None, control=None, transformer_options={}, **kwargs): """ Apply the model to an input batch. :param x: an [N x C x ...] Tensor of inputs. :param timesteps: a 1-D batch of timesteps. :param context: conditioning plugged in via crossattn :param y: an [N] Tensor of labels, if class-conditional. :return: an [N x C x ...] Tensor of outputs. 
""" transformer_options["original_shape"] = list(x.shape) transformer_options["transformer_index"] = 0 transformer_patches = transformer_options.get("patches", {}) num_video_frames = kwargs.get("num_video_frames", self.default_num_video_frames) image_only_indicator = kwargs.get("image_only_indicator", None) time_context = kwargs.get("time_context", None) assert (y is not None) == ( self.num_classes is not None ), "must specify y if and only if the model is class-conditional" hs = [] t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False).to(x.dtype) emb = self.time_embed(t_emb) if self.num_classes is not None: assert y.shape[0] == x.shape[0] emb = emb + self.label_emb(y) h = x for id, module in enumerate(self.input_blocks): transformer_options["block"] = ("input", id) h = forward_timestep_embed(module, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) h = apply_control(h, control, 'input') if "input_block_patch" in transformer_patches: patch = transformer_patches["input_block_patch"] for p in patch: h = p(h, transformer_options) hs.append(h) if "input_block_patch_after_skip" in transformer_patches: patch = transformer_patches["input_block_patch_after_skip"] for p in patch: h = p(h, transformer_options) transformer_options["block"] = ("middle", 0) if self.middle_block is not None: h = forward_timestep_embed(self.middle_block, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) h = apply_control(h, control, 'middle') for id, module in enumerate(self.output_blocks): transformer_options["block"] = ("output", id) hsp = hs.pop() hsp = apply_control(hsp, control, 'output') if "output_block_patch" in transformer_patches: patch = transformer_patches["output_block_patch"] for p in patch: h, hsp = p(h, hsp, transformer_options) h = th.cat([h, hsp], dim=1) del hsp if len(hs) > 0: output_shape = hs[-1].shape else: output_shape = None h = forward_timestep_embed(module, h, emb, context, transformer_options, output_shape, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) h = h.type(x.dtype) if self.predict_codebook_ids: return self.id_predictor(h) else: return self.out(h)
import torch import torch.nn as nn import numpy as np from functools import partial from .util import extract_into_tensor, make_beta_schedule from comfy.ldm.util import default class AbstractLowScaleModel(nn.Module): def __init__(self, noise_schedule_config=None): super(AbstractLowScaleModel, self).__init__() if noise_schedule_config is not None: self.register_schedule(**noise_schedule_config) def register_schedule(self, beta_schedule="linear", timesteps=1000, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end, cosine_s=cosine_s) alphas = 1. - betas alphas_cumprod = np.cumprod(alphas, axis=0) alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1]) timesteps, = betas.shape self.num_timesteps = int(timesteps) self.linear_start = linear_start self.linear_end = linear_end assert alphas_cumprod.shape[0] == self.num_timesteps, 'alphas have to be defined for each timestep' to_torch = partial(torch.tensor, dtype=torch.float32) self.register_buffer('betas', to_torch(betas)) self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev)) self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod))) self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod))) self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod))) self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod))) self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1))) def q_sample(self, x_start, t, noise=None, seed=None): if noise is None: if seed is None: noise = torch.randn_like(x_start) else: noise = torch.randn(x_start.size(), dtype=x_start.dtype, layout=x_start.layout, generator=torch.manual_seed(seed)).to(x_start.device) return (extract_into_tensor(self.sqrt_alphas_cumprod.to(x_start.device), t, x_start.shape) * x_start + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod.to(x_start.device), t, x_start.shape) * noise) def forward(self, x): return x, None def decode(self, x): return x class SimpleImageConcat(AbstractLowScaleModel): def __init__(self): super(SimpleImageConcat, self).__init__(noise_schedule_config=None) self.max_noise_level = 0 def forward(self, x): return x, torch.zeros(x.shape[0], device=x.device).long() class ImageConcatWithNoiseAugmentation(AbstractLowScaleModel): def __init__(self, noise_schedule_config, max_noise_level=1000, to_cuda=False): super().__init__(noise_schedule_config=noise_schedule_config) self.max_noise_level = max_noise_level def forward(self, x, noise_level=None, seed=None): if noise_level is None: noise_level = torch.randint(0, self.max_noise_level, (x.shape[0],), device=x.device).long() else: assert isinstance(noise_level, torch.Tensor) z = self.q_sample(x, noise_level, seed=seed) return z, noise_level
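# A minimal standalone sketch of the closed-form forward-diffusion step that
# AbstractLowScaleModel.q_sample implements:
#     z = sqrt(alpha_bar_t) * x0 + sqrt(1 - alpha_bar_t) * noise
# Assumes only torch/numpy. The linear beta schedule mirrors the "linear" branch of
# make_beta_schedule but is recomputed inline so the snippet is self-contained.
import numpy as np
import torch

T = 1000
betas = np.linspace(1e-4 ** 0.5, 2e-2 ** 0.5, T) ** 2        # "linear" schedule
alphas_cumprod = np.cumprod(1.0 - betas)                      # alpha_bar_t

x0 = torch.randn(4, 3, 8, 8)                                  # clean inputs
t = torch.randint(0, T, (4,))                                 # per-sample timestep
noise = torch.randn_like(x0)

sqrt_ac = torch.tensor(np.sqrt(alphas_cumprod), dtype=torch.float32)[t].view(-1, 1, 1, 1)
sqrt_omac = torch.tensor(np.sqrt(1.0 - alphas_cumprod), dtype=torch.float32)[t].view(-1, 1, 1, 1)

z = sqrt_ac * x0 + sqrt_omac * noise                          # same quantity q_sample returns
print(z.shape)                                                # torch.Size([4, 3, 8, 8])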
import os import math import torch import torch.nn as nn import numpy as np from einops import repeat, rearrange from comfy.ldm.util import instantiate_from_config class AlphaBlender(nn.Module): strategies = ["learned", "fixed", "learned_with_images"] def __init__( self, alpha: float, merge_strategy: str = "learned_with_images", rearrange_pattern: str = "b t -> (b t) 1 1", ): super().__init__() self.merge_strategy = merge_strategy self.rearrange_pattern = rearrange_pattern assert ( merge_strategy in self.strategies ), f"merge_strategy needs to be in {self.strategies}" if self.merge_strategy == "fixed": self.register_buffer("mix_factor", torch.Tensor([alpha])) elif ( self.merge_strategy == "learned" or self.merge_strategy == "learned_with_images" ): self.register_parameter( "mix_factor", torch.nn.Parameter(torch.Tensor([alpha])) ) else: raise ValueError(f"unknown merge strategy {self.merge_strategy}") def get_alpha(self, image_only_indicator: torch.Tensor, device) -> torch.Tensor: if self.merge_strategy == "fixed": alpha = self.mix_factor.to(device) elif self.merge_strategy == "learned": alpha = torch.sigmoid(self.mix_factor.to(device)) elif self.merge_strategy == "learned_with_images": if image_only_indicator is None: alpha = rearrange(torch.sigmoid(self.mix_factor.to(device)), "... -> ... 1") else: alpha = torch.where( image_only_indicator.bool(), torch.ones(1, 1, device=image_only_indicator.device), rearrange(torch.sigmoid(self.mix_factor.to(image_only_indicator.device)), "... -> ... 1"), ) alpha = rearrange(alpha, self.rearrange_pattern) else: raise NotImplementedError() return alpha def forward( self, x_spatial, x_temporal, image_only_indicator=None, ) -> torch.Tensor: alpha = self.get_alpha(image_only_indicator, x_spatial.device) x = ( alpha.to(x_spatial.dtype) * x_spatial + (1.0 - alpha).to(x_spatial.dtype) * x_temporal ) return x def make_beta_schedule(schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): if schedule == "linear": betas = ( torch.linspace(linear_start ** 0.5, linear_end ** 0.5, n_timestep, dtype=torch.float64) ** 2 ) elif schedule == "cosine": timesteps = ( torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s ) alphas = timesteps / (1 + cosine_s) * np.pi / 2 alphas = torch.cos(alphas).pow(2) alphas = alphas / alphas[0] betas = 1 - alphas[1:] / alphas[:-1] betas = torch.clamp(betas, min=0, max=0.999) elif schedule == "squaredcos_cap_v2": return betas_for_alpha_bar( n_timestep, lambda t: math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2, ) elif schedule == "sqrt_linear": betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) elif schedule == "sqrt": betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) ** 0.5 else: raise ValueError(f"schedule '{schedule}' unknown.") return betas def make_ddim_timesteps(ddim_discr_method, num_ddim_timesteps, num_ddpm_timesteps, verbose=True): if ddim_discr_method == 'uniform': c = num_ddpm_timesteps ddim_timesteps = np.asarray(list(range(0, num_ddpm_timesteps, c))) elif ddim_discr_method == 'quad': ddim_timesteps = ((np.linspace(0, np.sqrt(num_ddpm_timesteps * .8), num_ddim_timesteps)) ** 2).astype(int) else: raise NotImplementedError(f'There is no ddim discretization method called "{ddim_discr_method}"') steps_out = ddim_timesteps + 1 if verbose: print(f'Selected timesteps for ddim sampler: {steps_out}') return steps_out def make_ddim_sampling_parameters(alphacums, ddim_timesteps, eta, verbose=True): alphas = alphacums[ddim_timesteps] 
alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist()) sigmas = eta * np.sqrt((1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev)) if verbose: print(f'Selected alphas for ddim sampler: a_t: {alphas}; a_(t-1): {alphas_prev}') print(f'For the chosen value of eta, which is {eta}, ' f'this results in the following sigma_t schedule for ddim sampler {sigmas}') return sigmas, alphas, alphas_prev def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999): """ Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of (1-beta) over time from t = [0,1]. :param num_diffusion_timesteps: the number of betas to produce. :param alpha_bar: a lambda that takes an argument t from 0 to 1 and produces the cumulative product of (1-beta) up to that part of the diffusion process. :param max_beta: the maximum beta to use; use values lower than 1 to prevent singularities. """ betas = [] for i in range(num_diffusion_timesteps): t1 = i / num_diffusion_timesteps t2 = (i + 1) / num_diffusion_timesteps betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta)) return np.array(betas) def extract_into_tensor(a, t, x_shape): b, *_ = t.shape out = a.gather(-1, t) return out.reshape(b, *((1,) * (len(x_shape) - 1))) def checkpoint(func, inputs, params, flag): """ Evaluate a function without caching intermediate activations, allowing for reduced memory at the expense of extra compute in the backward pass. :param func: the function to evaluate. :param inputs: the argument sequence to pass to `func`. :param params: a sequence of parameters `func` depends on but does not explicitly take as arguments. :param flag: if False, disable gradient checkpointing. """ if flag: args = tuple(inputs) + tuple(params) return CheckpointFunction.apply(func, len(inputs), *args) else: return func(*inputs) class CheckpointFunction(torch.autograd.Function): @staticmethod def forward(ctx, run_function, length, *args): ctx.run_function = run_function ctx.input_tensors = list(args[:length]) ctx.input_params = list(args[length:]) ctx.gpu_autocast_kwargs = {"enabled": torch.is_autocast_enabled(), "dtype": torch.get_autocast_gpu_dtype(), "cache_enabled": torch.is_autocast_cache_enabled()} with torch.no_grad(): output_tensors = ctx.run_function(*ctx.input_tensors) return output_tensors @staticmethod def backward(ctx, *output_grads): ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors] with torch.enable_grad(), \ torch.cuda.amp.autocast(**ctx.gpu_autocast_kwargs): shallow_copies = [x.view_as(x) for x in ctx.input_tensors] output_tensors = ctx.run_function(*shallow_copies) input_grads = torch.autograd.grad( output_tensors, ctx.input_tensors + ctx.input_params, output_grads, allow_unused=True, ) del ctx.input_tensors del ctx.input_params del output_tensors return (None, None) + input_grads def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False): """ Create sinusoidal timestep embeddings. :param timesteps: a 1-D Tensor of N indices, one per batch element. These may be fractional. :param dim: the dimension of the output. :param max_period: controls the minimum frequency of the embeddings. :return: an [N x dim] Tensor of positional embeddings. 
""" if not repeat_only: half = dim freqs = torch.exp( -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32, device=timesteps.device) / half ) args = timesteps[:, None].float() * freqs[None] embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) if dim % 2: embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) else: embedding = repeat(timesteps, 'b -> b d', d=dim) return embedding def zero_module(module): """ Zero out the parameters of a module and return it. """ for p in module.parameters(): p.detach().zero_() return module def scale_module(module, scale): """ Scale the parameters of a module and return it. """ for p in module.parameters(): p.detach().mul_(scale) return module def mean_flat(tensor): """ Take the mean over all non-batch dimensions. """ return tensor.mean(dim=list(range(1, len(tensor.shape)))) def avg_pool_nd(dims, *args, **kwargs): """ Create a 1D, 2D, or 3D average pooling module. """ if dims == 1: return nn.AvgPool1d(*args, **kwargs) elif dims == 2: return nn.AvgPool2d(*args, **kwargs) elif dims == 3: return nn.AvgPool3d(*args, **kwargs) raise ValueError(f"unsupported dimensions: {dims}") class HybridConditioner(nn.Module): def __init__(self, c_concat_config, c_crossattn_config): super().__init__() self.concat_conditioner = instantiate_from_config(c_concat_config) self.crossattn_conditioner = instantiate_from_config(c_crossattn_config) def forward(self, c_concat, c_crossattn): c_concat = self.concat_conditioner(c_concat) c_crossattn = self.crossattn_conditioner(c_crossattn) return {'c_concat': [c_concat], 'c_crossattn': [c_crossattn]} def noise_like(shape, device, repeat=False): repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat(shape[0], *((1,) * (len(shape) - 1))) noise = lambda: torch.randn(shape, device=device) return repeat_noise() if repeat else noise()
import torch import numpy as np class AbstractDistribution: def sample(self): raise NotImplementedError() def mode(self): raise NotImplementedError() class DiracDistribution(AbstractDistribution): def __init__(self, value): self.value = value def sample(self): return self.value def mode(self): return self.value class DiagonalGaussianDistribution(object): def __init__(self, parameters, deterministic=False): self.parameters = parameters self.mean, self.logvar = torch.chunk(parameters, 2, dim=1) self.logvar = torch.clamp(self.logvar, -30.0, 20.0) self.deterministic = deterministic self.std = torch.exp(0.5 * self.logvar) self.var = torch.exp(self.logvar) if self.deterministic: self.var = self.std = torch.zeros_like(self.mean).to(device=self.parameters.device) def sample(self): x = self.mean + self.std * torch.randn(self.mean.shape).to(device=self.parameters.device) return x def kl(self, other=None): if self.deterministic: return torch.Tensor([0.]) else: if other is None: return 0.5 * torch.sum(torch.pow(self.mean, 2) + self.var - 1.0 - self.logvar, dim=[1, 2, 3]) else: return 0.5 * torch.sum( torch.pow(self.mean - other.mean, 2) / other.var + self.var / other.var - 1.0 - self.logvar + other.logvar, dim=[1, 2, 3]) def nll(self, sample, dims=[1,2,3]): if self.deterministic: return torch.Tensor([0.]) logtwopi = np.log(2.0 * np.pi) return 0.5 * torch.sum( logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var, dim=dims) def mode(self): return self.mean def normal_kl(mean1, logvar1, mean2, logvar2): """ source: https: Compute the KL divergence between two gaussians. Shapes are automatically broadcasted, so batches can be compared to scalars, among other use cases. """ tensor = None for obj in (mean1, logvar1, mean2, logvar2): if isinstance(obj, torch.Tensor): tensor = obj break assert tensor is not None, "at least one argument must be a Tensor" logvar1, logvar2 = [ x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor) for x in (logvar1, logvar2) ] return 0.5 * ( -1.0 + logvar2 - logvar1 + torch.exp(logvar1 - logvar2) + ((mean1 - mean2) ** 2) * torch.exp(-logvar2) )
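# A minimal standalone check of the two formulas in DiagonalGaussianDistribution above:
# the reparameterized sample mean + std * eps, and the KL divergence against a standard
# normal, 0.5 * sum(mean^2 + var - 1 - logvar). Assumes only torch; shapes are illustrative.
import torch

params = torch.randn(2, 8, 4, 4)                 # [mean | logvar] stacked along dim=1
mean, logvar = torch.chunk(params, 2, dim=1)
logvar = torch.clamp(logvar, -30.0, 20.0)
std, var = torch.exp(0.5 * logvar), torch.exp(logvar)

sample = mean + std * torch.randn_like(mean)     # reparameterization trick, as in sample()
kl = 0.5 * torch.sum(mean.pow(2) + var - 1.0 - logvar, dim=[1, 2, 3])   # kl(other=None)
print(sample.shape, kl.shape)                    # torch.Size([2, 4, 4, 4]) torch.Size([2])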
from ..diffusionmodules.upscaling import ImageConcatWithNoiseAugmentation from ..diffusionmodules.openaimodel import Timestep import torch class CLIPEmbeddingNoiseAugmentation(ImageConcatWithNoiseAugmentation): def __init__(self, *args, clip_stats_path=None, timestep_dim=256, **kwargs): super().__init__(*args, **kwargs) if clip_stats_path is None: clip_mean, clip_std = torch.zeros(timestep_dim), torch.ones(timestep_dim) else: clip_mean, clip_std = torch.load(clip_stats_path, map_location="cpu") self.register_buffer("data_mean", clip_mean[None, :], persistent=False) self.register_buffer("data_std", clip_std[None, :], persistent=False) self.time_embed = Timestep(timestep_dim) def scale(self, x): x = (x - self.data_mean.to(x.device)) * 1. / self.data_std.to(x.device) return x def unscale(self, x): x = (x * self.data_std.to(x.device)) + self.data_mean.to(x.device) return x def forward(self, x, noise_level=None, seed=None): if noise_level is None: noise_level = torch.randint(0, self.max_noise_level, (x.shape[0],), device=x.device).long() else: assert isinstance(noise_level, torch.Tensor) x = self.scale(x) z = self.q_sample(x, noise_level, seed=seed) z = self.unscale(z) noise_level = self.time_embed(noise_level) return z, noise_level
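# A minimal standalone sketch of the normalize -> noise -> denormalize round trip that
# CLIPEmbeddingNoiseAugmentation.forward performs: embeddings are standardized with the
# stored data_mean/data_std, noised in that standardized space, then mapped back.
# Assumes only torch; data_mean/data_std and the sqrt(alpha_bar_t) stand-in below are
# random placeholders for the real statistics and schedule, purely for illustration.
import torch

timestep_dim = 256
data_mean = torch.randn(1, timestep_dim) * 0.1
data_std = torch.rand(1, timestep_dim) + 0.5

x = torch.randn(4, timestep_dim)                         # CLIP image embeddings
noise_level = torch.randint(0, 1000, (4,))

x_scaled = (x - data_mean) / data_std                    # scale()
sqrt_ac = torch.rand(4, 1)                               # placeholder for sqrt(alpha_bar_t)
z_scaled = sqrt_ac * x_scaled + (1 - sqrt_ac ** 2).clamp(min=0).sqrt() * torch.randn_like(x_scaled)
z = z_scaled * data_std + data_mean                      # unscale()
print(z.shape, noise_level.shape)                        # torch.Size([4, 256]) torch.Size([4])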
{ "bos_token": { "content": "<|startoftext|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false }, "eos_token": { "content": "<|endoftext|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false }, "pad_token": "<|endoftext|>", "unk_token": { "content": "<|endoftext|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false } }
{ "add_prefix_space": false, "bos_token": { "__type": "AddedToken", "content": "<|startoftext|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false }, "do_lower_case": true, "eos_token": { "__type": "AddedToken", "content": "<|endoftext|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false }, "errors": "replace", "model_max_length": 77, "name_or_path": "openai/clip-vit-large-patch14", "pad_token": "<|endoftext|>", "special_tokens_map_file": "./special_tokens_map.json", "tokenizer_class": "CLIPTokenizer", "unk_token": { "__type": "AddedToken", "content": "<|endoftext|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false } }
{ "!": 0, "!!": 1443, "!!!": 11194, "!!!!": 4003, "!!!!!!!!": 11281, "!!!!!!!!!!!!!!!!": 30146, "!!!!!!!!!!!</w>": 49339, "!!!!!!!!!!</w>": 35579, "!!!!!!!!!</w>": 28560, "!!!!!!!!</w>": 21622, "!!!!!!!</w>": 15203, "!!!!!!</w>": 9168, "!!!!!</w>": 5203, "!!!!</w>": 2360, "!!!\"</w>": 28048, "!!!)</w>": 42532, "!!!</w>": 995, "!!\"</w>": 20556, "!! "!!)</w>": 28352, "!!</w>": 748, "!!@</w>": 40705, "!\"</w>": 2947, "!\"@</w>": 43819, "! "!'</w>": 13222, "!),</w>": 37904, "!).</w>": 26225, "!)</w>": 4571, "!*</w>": 37737, "!,</w>": 29325, "!-</w>": 43499, "!...</w>": 22121, "!..</w>": 35475, "!.</w>": 22517, "!:)</w>": 31671, "!:</w>": 17545, "!</w>": 256, "!?!</w>": 29767, "!?!?</w>": 47081, "!?</w>": 6004, "!@</w>": 15117, "!]</w>": 34466, "!âĢ¦</w>": 35068, "!âĿ¤ï¸ı</w>": 32559, "!ðŁİī</w>": 49085, "!ðŁĺĬ</w>": 43434, "!ðŁĺį</w>": 36438, "\"": 1, "\"!</w>": 10377, "\"\"": 41530, "\"\"\"</w>": 25539, "\"\"</w>": 8575, "\" "\"'</w>": 31065, "\"(</w>": 32741, "\")</w>": 13112, "\",</w>": 4332, "\"-</w>": 9375, "\"....</w>": 37785, "\"...</w>": 9049, "\"..</w>": 25403, "\".</w>": 2811, "\"/</w>": 39486, "\":</w>": 7811, "\";</w>": 37549, "\"</w>": 257, "\"?</w>": 11727, "\"@</w>": 1512, "\"@_</w>": 20236, "\"[</w>": 36930, "\"âĢ¦</w>": 33993, "\"âĢĶ</w>": 41151, " " " " " " " " "$": 3, "$$": 24233, "$$$</w>": 31859, "$$</w>": 14929, "$)</w>": 39460, "$.</w>": 34682, "$</w>": 259, "%": 4, "%!</w>": 35070, "%),</w>": 37819, "%)</w>": 16063, "%,</w>": 14505, "%-</w>": 48784, "%.</w>": 12475, "%;</w>": 33379, "%</w>": 260, "&": 5, "&&</w>": 27791, "&</w>": 261, "'": 6, "'!</w>": 13781, "'\"</w>": 19479, "' "''": 46594, "''</w>": 8445, "')</w>": 19175, "',</w>": 5662, "'-</w>": 26152, "'...</w>": 20474, "'.</w>": 4645, "':</w>": 7182, "';</w>": 44517, "'</w>": 262, "'?</w>": 17242, "'@</w>": 26397, "'d</w>": 1896, "'ll</w>": 1342, "'m</w>": 880, "'re</w>": 982, "'s</w>": 568, "'t</w>": 713, "'ve</w>": 1200, "'âĢ¦</w>": 42120, "(": 7, "(!)</w>": 30253, "(\"</w>": 18741, "( "($)</w>": 46597, "($</w>": 15186, "(&</w>": 15042, "('</w>": 18235, "((": 22944, "(((</w>": 33287, "((</w>": 13796, "().</w>": 41737, "()</w>": 8475, "(*": 48004, "(*</w>": 39575, "(+</w>": 12903, "(-</w>": 20228, "(...</w>": 45159, "(.</w>": 43055, "(:</w>": 8528, "(;</w>": 23983, "(</w>": 263, "(?)</w>": 22885, "(@</w>": 2181, "(£</w>": 33987, "(©</w>": 44886, "(ðŁĵ·:</w>": 34610, "(ðŁĵ·</w>": 37999, "(ðŁĵ¸:</w>": 44422, "(ðŁĵ¸</w>": 45204, ")": 8, ")!!</w>": 47518, ")!</w>": 7805, ")\"</w>": 13046, ") ")'</w>": 23613, ")(</w>": 27956, "))": 13720, "))))": 42911, "))))</w>": 34181, ")))</w>": 18305, "))</w>": 5167, "),</w>": 2361, ")-</w>": 19034, ")...</w>": 15274, ")..</w>": 41822, ").</w>": 1818, ")/</w>": 26616, "):</w>": 4143, ");</w>": 19686, ")</w>": 264, ")?</w>": 18765, ")@</w>": 41928, ")_/": 45028, ")_/¯</w>": 45781, ")âĢ¦</w>": 41844, "*": 9, "*)</w>": 30956, "**": 9825, "****": 21326, "********": 42974, "*****</w>": 43571, "****</w>": 25167, "***</w>": 7829, "**</w>": 4441, "*,</w>": 41895, "*-*</w>": 23568, "*.</w>": 31304, "*</w>": 265, "*_*</w>": 44535, "+": 10, "+)</w>": 34810, "++": 47298, "+++</w>": 35986, "++</w>": 19056, "+,</w>": 35885, "+.</w>": 25238, "+/-</w>": 47614, "+</w>": 266, ",": 11, ",\"</w>": 3823, ", ",&</w>": 26905, ",'</w>": 10599, ",)</w>": 44493, ",,": 21340, ",,,,</w>": 33225, ",,,</w>": 14811, ",,</w>": 8844, ",-</w>": 29821, ",...</w>": 20365, ",.</w>": 41277, ",</w>": 267, ",@</w>": 13975, ",âĢ¦</w>": 14601, "-": 12, "-\"</w>": 18646, "- "-$</w>": 24946, "-'</w>": 28010, "-(</w>": 
33345, "-)</w>": 3535, "-*</w>": 21527, "--": 2154, "----": 5753, "--------": 11772, "----------------": 23122, "----</w>": 30164, "----></w>": 35999, "---</w>": 11079, "---></w>": 14518, "--</w>": 2432, "--></w>": 6422, "-->></w>": 47252, "-.-</w>": 32765, "-...</w>": 43147, "-.</w>": 44040, "-</w>": 268, "-></w>": 5081, "-@</w>": 10087, "-_-</w>": 27227, "-__": 42718, "-âĢ¦</w>": 30047, ".": 13, ".!!</w>": 37805, ".!</w>": 14030, ".\"": 18650, ".\"-</w>": 21234, ".\"</w>": 1081, ".\"âĢĶ</w>": 48703, ". ".'\"</w>": 41558, ".''</w>": 49379, ".'</w>": 5938, ".(</w>": 22294, ".)</w>": 5376, ".*</w>": 26145, ".,</w>": 5276, ".-</w>": 12481, "..": 608, "..!!</w>": 23707, "..!</w>": 17994, "..\"</w>": 15229, ".. "..,</w>": 47143, "...": 3002, "...!!!</w>": 38351, "...!!</w>": 39915, "...!</w>": 16860, "...\"</w>": 5240, "... "...&</w>": 44979, "...'</w>": 23167, "...(</w>": 37981, "...)</w>": 14040, "...,</w>": 42717, "....": 2386, "....\"</w>": 26689, ".... ".....": 34151, "..... "........": 8246, "................": 24855, "............</w>": 42965, "...........</w>": 35008, "..........</w>": 25526, ".........</w>": 19881, "........</w>": 14720, ".......</w>": 9917, "......</w>": 5590, ".....</w>": 3104, "....</w>": 1390, "....@</w>": 29790, "...:</w>": 34570, "...</w>": 678, "...?</w>": 16388, "...@</w>": 12672, "..</w>": 852, "..?</w>": 23875, "..@</w>": 21124, "./</w>": 31975, ".:</w>": 15811, ".;</w>": 47596, ".</w>": 269, ".<</w>": 29442, ".?</w>": 29294, ".@</w>": 1230, ".]</w>": 33511, ".~</w>": 42651, ".âĢ¦</w>": 18047, ".âĿ¤ï¸ı</w>": 39085, ".âłĢ</w>": 30097, ".ðŁĺĤ</w>": 46580, "/": 14, "/ "/$</w>": 36266, "/-</w>": 19811, "/.</w>": 39382, " " " " "/</w>": 270, "/@</w>": 8216, "0": 15, "0</w>": 271, "1": 16, "1</w>": 272, "2": 17, "2</w>": 273, "3": 18, "3</w>": 274, "4": 19, "4</w>": 275, "5": 20, "5</w>": 276, "6": 21, "6</w>": 277, "7": 22, "7</w>": 278, "8": 23, "8</w>": 279, "9": 24, "9</w>": 280, ":": 25, ":\"": 29498, ":\")</w>": 46432, ":\"</w>": 12089, ": ":$</w>": 33769, ":'": 8017, ":'(</w>": 21250, ":')</w>": 10701, ":'</w>": 23851, ":((</w>": 42496, ":(</w>": 5965, ":)": 11070, ":))))</w>": 42339, ":)))</w>": 21840, ":))</w>": 10164, ":).</w>": 39010, ":)</w>": 1408, ":*</w>": 12617, ":-": 13021, ":-(</w>": 25137, ":-)</w>": 4223, ":-</w>": 10323, ":...</w>": 42140, ": ":/</w>": 13604, "::": 33077, ":::</w>": 43818, "::</w>": 9788, ":</w>": 281, ":></w>": 39677, ":@</w>": 14339, ":]</w>": 43486, ":|</w>": 45986, ":âĢ¦</w>": 22365, ";": 26, ";))</w>": 41873, ";)</w>": 3661, ";-": 35657, ";-)</w>": 10475, ";;": 34824, ";;</w>": 24492, ";</w>": 282, "<": 27, "<-</w>": 47280, "</</w>": 34308, "<<": 24588, "<</w>": 283, "<<</w>": 16482, "<<<</w>": 35054, "<|endoftext|>": 49407, "<|startoftext|>": 49406, "=": 28, "=))</w>": 39587, "=)</w>": 17840, "=</w>": 284, "==": 11748, "====": 21734, "========": 38952, "==></w>": 29688, "=></w>": 9714, ">": 29, ">.<</w>": 38507, ">:</w>": 36196, "></w>": 285, "><</w>": 28015, ">>": 8270, ">></w>": 2988, ">>></w>": 6395, ">>>>": 18461, ">>>></w>": 18435, ">>>>></w>": 32972, ">>>>>></w>": 48947, ">>>>>>>>": 41947, ">_": 44144, "?": 30, "?!": 9785, "?!!</w>": 25342, "?!\"</w>": 29315, "?!</w>": 2835, "?!?!</w>": 16349, "?!?!?!</w>": 49084, "?!?!?</w>": 37619, "?!?</w>": 11395, "?\"</w>": 3283, "? 
"?'</w>": 13610, "?)</w>": 9626, "?,</w>": 41628, "?...</w>": 22641, "?..</w>": 43905, "?.</w>": 41251, "?:</w>": 21067, "?</w>": 286, "??": 5195, "??!!</w>": 43219, "??!</w>": 37341, "??\"</w>": 44996, "??</w>": 2197, "???": 40017, "???</w>": 3824, "????": 15936, "????</w>": 10362, "?????</w>": 21370, "??????</w>": 34589, "????????": 45091, "?@</w>": 29258, "?ðŁ¤Ķ</w>": 47928, "@": 31, "@ "@.</w>": 43730, "@/</w>": 28639, "@</w>": 287, "@@</w>": 30314, "@_</w>": 2692, "@__</w>": 17042, "@___</w>": 48308, "A": 32, "A</w>": 288, "B": 33, "B</w>": 289, "C": 34, "C</w>": 290, "D": 35, "D</w>": 291, "E": 36, "E</w>": 292, "F": 37, "F</w>": 293, "G": 38, "G</w>": 294, "H": 39, "H</w>": 295, "I": 40, "I</w>": 296, "J": 41, "J</w>": 297, "K": 42, "K</w>": 298, "L": 43, "L</w>": 299, "M": 44, "M</w>": 300, "N": 45, "N</w>": 301, "O": 46, "O</w>": 302, "P": 47, "P</w>": 303, "Q": 48, "Q</w>": 304, "R": 49, "R</w>": 305, "S": 50, "S</w>": 306, "T": 51, "T</w>": 307, "U": 52, "U</w>": 308, "V": 53, "V</w>": 309, "W": 54, "W</w>": 310, "X": 55, "X</w>": 311, "Y": 56, "Y</w>": 312, "Z": 57, "Z</w>": 313, "[": 58, "[ "[...": 39975, "[...]</w>": 43790, "[</w>": 314, "[@</w>": 15148, "[]</w>": 22240, "\\": 59, "\\'</w>": 41239, "\\</w>": 315, "]": 60, "]\"</w>": 39434, "],</w>": 34067, "].</w>": 26262, "]:</w>": 21641, "]</w>": 316, "][ "][</w>": 29329, "^": 61, "^)</w>": 30720, "^-": 43516, "^.": 31552, "^.^</w>": 35791, "^</w>": 317, "^^": 34454, "^^</w>": 9064, "^_": 14423, "^_^</w>": 15995, "_": 62, "_'</w>": 44701, "_(</w>": 36951, "_)</w>": 37393, "_*</w>": 36237, "_,</w>": 31417, "_-</w>": 23193, "_.</w>": 26841, "_/": 37647, "_:</w>": 13109, "_</w>": 318, "__": 2355, "__:</w>": 47043, "__</w>": 3838, "___": 43812, "___</w>": 13530, "____": 4727, "____</w>": 25350, "_____</w>": 38803, "________": 9549, "________________": 20115, "`": 63, "`</w>": 319, "a": 64, "a</w>": 320, "aa": 1821, "aa</w>": 3894, "aaa": 14376, "aaa</w>": 9583, "aaaa": 6727, "aaaa</w>": 19336, "aaaaa</w>": 31095, "aaaaaa</w>": 44413, "aaaaaaaa": 23126, "aaaah</w>": 49151, "aaah</w>": 35856, "aaay</w>": 37846, "aab</w>": 34108, "aac": 23251, "aac</w>": 11346, "aad": 20464, "aad</w>": 35894, "aaf": 37638, "aaf</w>": 31534, "aag": 42174, "aah</w>": 28990, "aaj": 28727, "aaj</w>": 43411, "aak": 37739, "aal": 22268, "aal</w>": 30208, "aali": 27896, "aaliyah</w>": 46577, "aam": 12943, "aam</w>": 22775, "aama": 45018, "aamaadmi": 45563, "aamaadmiparty</w>": 46406, "aamir</w>": 27456, "aan": 20705, "aan</w>": 13426, "aand": 38054, "aap": 12023, "aap</w>": 12052, "aapl</w>": 34516, "aar": 4695, "aar</w>": 13234, "aard": 46932, "aaron": 13948, "aaron</w>": 7709, "aas": 28542, "aas</w>": 32205, "aat": 34018, "aat</w>": 35004, "aau</w>": 35426, "aay": 38281, "aay</w>": 40249, "aaz</w>": 26770, "ab": 596, "ab</w>": 3937, "aba": 44204, "aba</w>": 11102, "abad": 33444, "abad</w>": 7155, "aban": 41662, "aband": 8595, "abandon</w>": 28805, "abandoned</w>": 11227, "abar": 17860, "abar</w>": 39805, "abas": 25402, "abay</w>": 43542, "abb": 38954, "abb</w>": 38297, "abba</w>": 30870, "abbas": 37494, "abbas</w>": 24412, "abbey": 31927, "abbey</w>": 10132, "abbie</w>": 39949, "abbo": 13536, "abbot</w>": 44046, "abbott": 43737, "abbott</w>": 15649, "abbrevi": 44843, "abby": 30586, "abby</w>": 14694, "abc": 13137, "abc</w>": 5334, "abcnews</w>": 31566, "abd</w>": 44093, "abdel": 46511, "abdomin": 35335, "abdominal</w>": 39328, "abdu": 13361, "abduc": 17884, "abducted</w>": 31520, "abduction</w>": 36984, "abdul": 14227, "abdul</w>": 15593, 
"abdullah</w>": 21317, "abe": 15856, "abe</w>": 12734, "abee</w>": 36037, "abel": 31938, "abel</w>": 25318, "abella</w>": 46156, "aben": 40865, "aber": 7828, "aber</w>": 41867, "aberdeen": 30539, "aberdeen</w>": 17236, "abh": 27484, "abh</w>": 33649, "abhcosmetics</w>": 49189, "abhi": 18113, "abhin": 44045, "abhishek</w>": 44502, "abi": 16867, "abi</w>": 14161, "abia</w>": 48604, "abide</w>": 49163, "abig": 20863, "abigail</w>": 25686, "abil": 21135, "abilities</w>": 8724, "ability": 35146, "ability</w>": 3024, "abit</w>": 48668, "ablanc": 33716, "able": 10102, "able</w>": 863, "abled</w>": 10655, "ableg</w>": 24055, "ables</w>": 8486, "ableton</w>": 47169, "ably</w>": 6748, "abnormal</w>": 40934, "abo": 2889, "abo</w>": 21861, "aboard</w>": 11661, "abol": 31768, "abolic</w>": 46827, "abolish</w>": 47403, "aboo</w>": 42433, "abor": 8416, "aboriginal</w>": 20422, "abortion</w>": 12336, "abortions</w>": 43218, "aboss</w>": 46401, "abou": 36455, "abou</w>": 44053, "abound</w>": 41037, "abour</w>": 46637, "about": 20204, "about</w>": 781, "abouts</w>": 36339, "above": 35019, "above</w>": 4348, "aboy</w>": 37077, "abpoli</w>": 44779, "abq</w>": 38767, "abr": 44932, "abra": 10694, "abra</w>": 35087, "abraham": 40623, "abraham</w>": 15869, "abram": 33255, "abrams</w>": 29852, "abre": 22472, "abre</w>": 46756, "abri": 28605, "abridged</w>": 45333, "abroad</w>": 11253, "abru": 46295, "abs": 18431, "abs</w>": 11109, "absc": 25389, "abscbn": 44260, "abscbn</w>": 45810, "absen": 32453, "absence</w>": 19240, "absent</w>": 30363, "absol": 4624, "absolu": 7055, "absolut": 4666, "absolute</w>": 7501, "absolutely</w>": 4703, "absor": 14303, "absorb</w>": 35806, "absorbed</w>": 45059, "absorbing</w>": 46412, "absorption</w>": 42210, "abstr": 7530, "abstract": 23885, "abstract</w>": 10197, "abstractart</w>": 31170, "abstraction</w>": 47696, "abstracts</w>": 40065, "absur": 21639, "absurd</w>": 29757, "abt</w>": 9850, "abu": 9167, "abu</w>": 11787, "abud": 20180, "abudha": 21450, "abudhabi</w>": 25256, "abuja</w>": 23371, "abun": 20544, "abundance</w>": 23236, "abundant</w>": 31611, "abur": 23377, "aburger</w>": 46660, "abuse</w>": 7678, "abused</w>": 23855, "abuses</w>": 37132, "abusing</w>": 36558, "abusive</w>": 26858, "abv</w>": 34172, "aby": 16342, "aby</w>": 31378, "abyss</w>": 33632, "abz</w>": 42292, "ac": 546, "ac</w>": 2816, "aca</w>": 9213, "acab": 41388, "acacia</w>": 44047, "acad</w>": 32537, "acade": 2892, "academia</w>": 22662, "academic": 31178, "academic</w>": 7935, "academics</w>": 26417, "academies</w>": 42569, "academy": 29968, "academy</w>": 4041, "acadi": 41455, "acadia</w>": 49236, "acam": 26172, "acan": 42227, "acan</w>": 26318, "acap": 32357, "acar": 22232, "acare</w>": 16961, "acc": 26805, "acc</w>": 9318, "acca</w>": 30883, "acce": 8564, "acceler": 10161, "accelerate</w>": 23619, "accelerated</w>": 38513, "accelerating</w>": 41821, "acceleration</w>": 39387, "accelerator</w>": 25261, "accent": 28110, "accent</w>": 18931, "accents</w>": 31738, "accenture</w>": 41853, "accep": 4616, "accept": 16447, "accept</w>": 9338, "acceptable</w>": 14209, "acceptance</w>": 17090, "accepted</w>": 9159, "accepting</w>": 12855, "accepts</w>": 22338, "access": 7596, "access</w>": 3822, "accessi": 10787, "accessibility</w>": 23407, "accessible</w>": 13977, "accessing</w>": 46339, "accessories</w>": 10220, "accessory</w>": 20417, "acci": 4263, "acci</w>": 33943, "accident</w>": 6608, "accidental</w>": 24895, "accidentally</w>": 11061, "accidents</w>": 22072, "acclaimed</w>": 21172, "acco</w>": 44730, 
"accol": 33858, "accolades</w>": 46731, "accom": 23658, "accommo": 34495, "accommod": 14386, "accommodate</w>": 34708, "accommodation</w>": 18066, "accommodations</w>": 45536, "accomp": 24985, "accompan": 14746, "accompanied</w>": 20715, "accompany</w>": 34142, "accompanying</w>": 38179, "accompli": 10205, "accomplish</w>": 25542, "accomplished</w>": 16462, "accomplishment</w>": 26100, "accomplishments</w>": 24965, "accor": 4182, "accord": 34293, "accord</w>": 28513, "according</w>": 4717, "accordingly</w>": 35535, "accordion</w>": 48760, "accoun": 3081, "account": 18424, "account</w>": 4684, "accountability</w>": 19377, "accountable</w>": 24216, "accountant</w>": 31026, "accountants</w>": 37222, "accounted</w>": 43951, "accounting</w>": 14805, "accounts</w>": 9974, "accra</w>": 31900, "accred": 17451, "accreditation</w>": 27015, "accredited</w>": 27647, "acct</w>": 45569, "accu": 5618, "accumul": 19275, "accumulation</w>": 37112, "accur": 6551, "accuracy</w>": 18423, "accurate</w>": 8858, "accurately</w>": 24206, "accusations</w>": 33615, "accuse</w>": 39414, "accused</w>": 9434, "accuses</w>": 27496, "accusing</w>": 41474, "acdc</w>": 45067, "ace": 2675, "ace</w>": 804, "acea</w>": 35219, "aceae</w>": 38153, "acele": 40868, "aceous</w>": 33610, "acer": 37990, "acer</w>": 25809, "aces</w>": 5725, "acet": 28735, "acf</w>": 38389, "ach": 972, "ach</w>": 987, "acha</w>": 22686, "acharya</w>": 45780, "achat</w>": 32706, "ache": 27771, "ache</w>": 7214, "ached</w>": 17048, "acher": 38442, "acher</w>": 17936, "achers</w>": 25051, "aches</w>": 14823, "achi": 3264, "achi</w>": 9087, "achiev": 8160, "achieve": 14798, "achieve</w>": 8175, "achieved</w>": 12359, "achievement</w>": 8245, "achievements</w>": 16114, "achiever</w>": 46286, "achievers</w>": 44544, "achieves</w>": 40123, "achieving</w>": 16120, "achilles</w>": 33327, "achim</w>": 42335, "aching</w>": 12864, "acho</w>": 33130, "achs</w>": 41195, "aci": 4359, "aci</w>": 34100, "acia</w>": 30163, "acial</w>": 32422, "acid": 35474, "acid</w>": 10085, "acidity</w>": 48800, "acids</w>": 27751, "acies</w>": 20162, "acin": 39442, "acing</w>": 9442, "acio</w>": 26202, "acion": 44965, "acion</w>": 24968, "acional</w>": 26435, "aciones</w>": 35832, "acious</w>": 16020, "acity</w>": 7511, "ación</w>": 38175, "ack": 877, "ack</w>": 725, "acked</w>": 5698, "acker": 31201, "acker</w>": 7940, "ackeray</w>": 41843, "acki</w>": 42857, "acking</w>": 5515, "ackles</w>": 28503, "acknow": 13563, "acknowle": 18100, "acknowledge</w>": 25209, "acknowledged</w>": 35913, "acknowledges</w>": 49083, "acknowledging</w>": 45645, "acks</w>": 3858, "acl": 47593, "acl</w>": 23073, "acle</w>": 6504, "acles</w>": 34164, "aclu</w>": 37354, "acm</w>": 39317, "acmilan</w>": 36500, "acne</w>": 24195, "aco": 9463, "aco</w>": 8800, "acol": 17431, "acollege</w>": 43468, "acom": 17224, "acom</w>": 22342, "acon": 11621, "acon</w>": 11571, "aconf</w>": 38851, "acons</w>": 31599, "acor": 22076, "acorn</w>": 37537, "acos</w>": 39943, "acosta</w>": 31994, "acou": 8794, "acoun": 31295, "acounty</w>": 45449, "acoustic</w>": 10616, "acoustics</w>": 43873, "acp</w>": 19627, "acqu": 7946, "acquainted</w>": 40713, "acqui": 12194, "acquire</w>": 21576, "acquired</w>": 15932, "acquires</w>": 27376, "acquiring</w>": 42785, "acquis": 14207, "acquisition</w>": 16543, "acquisitions</w>": 39649, "acr</w>": 43648, "acre": 26749, "acre</w>": 9493, "acres</w>": 11630, "acro": 21060, "acrob": 40891, "acron": 37770, "across</w>": 2500, "acrosse</w>": 40979, "acruz</w>": 40455, "acry": 10440, 
"acrylic</w>": 12252, "acs</w>": 11782, "act": 10305, "act</w>": 1393, "acted</w>": 10971, "acti": 4786, "acting</w>": 6319, "action": 12493, "action</w>": 1816, "actions</w>": 6271, "activ": 3430, "activate</w>": 26737, "activated</w>": 22249, "activation</w>": 26769, "active": 19009, "active</w>": 4046, "actively</w>": 18645, "activi": 7230, "activism</w>": 20117, "activist</w>": 10850, "activists</w>": 12649, "activities</w>": 6514, "activity</w>": 6206, "actment</w>": 44807, "acton": 36167, "acton</w>": 36697, "actonclimate</w>": 43797, "actor": 12181, "actor</w>": 4035, "actors</w>": 9255, "actorslife</w>": 25117, "actorvijay</w>": 34033, "actress</w>": 5805, "actresses</w>": 33639, "acts</w>": 6816, "actu": 2375, "actual</w>": 7488, "actually</w>": 2955, "acu": 9204, "acu</w>": 48475, "aculture</w>": 38145, "acup": 30869, "acup</w>": 27278, "acupuncture</w>": 40043, "acur": 44719, "acura</w>": 30120, "acus</w>": 33710, "acute</w>": 19734, "acy": 18717, "acy</w>": 2356, "ad": 594, "ad</w>": 680, "ada": 25785, "ada</w>": 1886, "adaily</w>": 47254, "adal": 46646, "adam": 6037, "adam</w>": 4944, "adamlambert</w>": 27659, "adams</w>": 7942, "adan</w>": 41802, "adani</w>": 37499, "adap": 6341, "adapt</w>": 22666, "adaptation</w>": 16566, "adapted</w>": 26657, "adapter</w>": 21839, "adapting</w>": 44120, "adaptive</w>": 28672, "adar": 27702, "adar</w>": 32681, "adas</w>": 23250, "adata</w>": 39500, "aday": 31367, "aday</w>": 10280, "adays</w>": 24337, "adb</w>": 45630, "adc</w>": 38201, "add": 19408, "add</w>": 3536, "addams</w>": 38912, "added</w>": 4149, "adder</w>": 47557, "addi</w>": 36378, "addic": 5709, "addict</w>": 14614, "addicted</w>": 16275, "addiction</w>": 11751, "addictive</w>": 29638, "addicts</w>": 29997, "adding</w>": 8676, "addis</w>": 43911, "addison</w>": 32369, "additi": 26927, "addition</w>": 6698, "additional</w>": 10666, "additions</w>": 22575, "additive</w>": 48546, "addo</w>": 40001, "address</w>": 5834, "addressed</w>": 20817, "addresses</w>": 12702, "addressing</w>": 10594, "adds</w>": 9944, "addy</w>": 24746, "ade": 2194, "ade</w>": 1928, "adecides</w>": 46374, "aded</w>": 9994, "adee</w>": 47054, "adel": 4434, "adel</w>": 27308, "adelaide": 38193, "adelaide</w>": 11611, "adele": 42843, "adele</w>": 21220, "adelrey</w>": 43627, "ademy</w>": 49123, "aden": 28669, "aden</w>": 28688, "adena</w>": 23648, "adequ": 18232, "adequate</w>": 22281, "ader</w>": 21365, "adero</w>": 49185, "aders</w>": 27672, "ades</w>": 5793, "adh</w>": 42301, "adhd</w>": 32649, "adhe": 21175, "adhesive</w>": 38429, "adi": 2486, "adi</w>": 8779, "adia</w>": 26874, "adic</w>": 36780, "adid": 8086, "adidas": 22396, "adidas</w>": 9589, "adidasoriginals</w>": 48575, "adies</w>": 45834, "adifference</w>": 37217, "adilla</w>": 41167, "ading</w>": 15000, "adio</w>": 15060, "adirond": 36843, "adish</w>": 49009, "adity": 28596, "aditya</w>": 37186, "adityanath</w>": 44437, "adjac": 32517, "adjacent</w>": 33836, "adjec": 45512, "adju": 16413, "adjun": 45995, "adjust": 13784, "adjust</w>": 28073, "adjustable</w>": 20476, "adjusted</w>": 30515, "adjusting</w>": 41132, "adjustment</w>": 36081, "adjustments</w>": 36331, "adl</w>": 49351, "adler</w>": 30222, "adm": 9892, "adm</w>": 33604, "admi": 11666, "admin</w>": 12528, "admini": 6434, "administr": 12174, "administration</w>": 9502, "administrative</w>": 22424, "administrator</w>": 22603, "administrators</w>": 36123, "admins</w>": 49297, "admir": 17031, "admiral</w>": 21013, "admiration</w>": 39569, "admire</w>": 17791, "admired</w>": 36103, 
"admirer</w>": 48344, "admiring</w>": 29835, "admission</w>": 11315, "admissions</w>": 22463, "admit</w>": 13769, "admits</w>": 16332, "admitted</w>": 20427, "admitting</w>": 46148, "adn</w>": 40339, "adnan</w>": 42037, "ado": 4775, "ado</w>": 2933, "adobe": 29256, "adobe</w>": 16484, "adog</w>": 44913, "adol": 33512, "adole": 22704, "adolescent</w>": 36793, "adolescents</w>": 45656, "adolf</w>": 41179, "adon": 25907, "adona</w>": 48419, "adop": 4183, "adopt": 16441, "adopt</w>": 11159, "adoptable</w>": 36905, "adoptdont": 19674, "adoptdontshop</w>": 20089, "adopted</w>": 12538, "adopting</w>": 30158, "adoption</w>": 11544, "adopts</w>": 40853, "ador": 4992, "ador</w>": 9162, "adora</w>": 40031, "adorable</w>": 6298, "adoration</w>": 46781, "adore</w>": 15502, "adored</w>": 49233, "adores</w>": 30290, "adorned</w>": 44953, "ados</w>": 20079, "adox</w>": 32188, "adp</w>": 44426, "adr</w>": 46189, "adren": 24204, "adrenaline</w>": 35552, "adri": 5935, "adrian": 25012, "adrian</w>": 13163, "adriana</w>": 41363, "adrid</w>": 26562, "adrien</w>": 47469, "adrienne</w>": 40081, "ads</w>": 2485, "adu": 16882, "adu</w>": 24446, "adukone</w>": 30511, "adul": 7222, "adult": 42209, "adult</w>": 7115, "adulthood</w>": 40964, "adults</w>": 9391, "adv": 1647, "adv</w>": 21018, "advan": 33411, "advance": 27291, "advance</w>": 7022, "advanced</w>": 7465, "advancement</w>": 35437, "advances</w>": 15852, "advancing</w>": 21355, "advani</w>": 48189, "advant": 7017, "advantage</w>": 8573, "advantaged</w>": 38361, "advantages</w>": 23506, "adven": 41670, "advent": 3071, "advent</w>": 15199, "adventcalendar</w>": 43492, "adventur": 29627, "adventure": 17251, "adventure</w>": 4377, "adventurer</w>": 48098, "adventures</w>": 7941, "adventurous</w>": 31179, "adver": 4806, "adverse</w>": 30348, "adversity</w>": 32516, "advert</w>": 19080, "adverti": 5682, "advertise</w>": 31473, "advertised</w>": 38987, "advertisement</w>": 18713, "advertiser</w>": 41829, "advertisers</w>": 45472, "advertising</w>": 8158, "adverts</w>": 44306, "advice</w>": 4973, "advis": 4634, "advise</w>": 25962, "advised</w>": 23196, "adviser</w>": 20367, "advisers</w>": 40984, "advises</w>": 42761, "advising</w>": 39648, "advisor</w>": 12380, "advisors</w>": 23197, "advisory</w>": 10224, "advoc": 6657, "advocacy</w>": 14443, "advocate</w>": 12044, "advocates</w>": 17757, "adwords</w>": 48343, "ady": 41446, "ady</w>": 8781, "ae": 5548, "ae</w>": 4542, "aea</w>": 37048, "aed</w>": 26912, "aege": 42304, "ael": 41533, "ael</w>": 43340, "aen": 43085, "aer": 10195, "aeri": 27685, "aerial": 44866, "aerial</w>": 12440, "aero": 10196, "aero</w>": 25026, "aerob": 42824, "aeron": 37286, "aeronau": 42816, "aerop": 27735, "aerosmith</w>": 43253, "aerospace</w>": 20530, "aes": 10617, "aes</w>": 35677, "aest</w>": 40694, "aesthe": 21181, "aesthetic</w>": 16179, "aesthetics</w>": 29295, "aew</w>": 47108, "af": 702, "af</w>": 4391, "afa</w>": 24953, "afan": 47474, "afar": 41637, "afar</w>": 37866, "afb</w>": 27022, "afc": 29742, "afc</w>": 6571, "afcb</w>": 44276, "afcon</w>": 30019, "afd</w>": 44626, "afe": 30487, "afe</w>": 13912, "afer": 44707, "aff": 8849, "aff</w>": 14864, "affair</w>": 13998, "affairs</w>": 9830, "affe": 4556, "affect</w>": 11361, "affected</w>": 9715, "affecting</w>": 18448, "affection": 33780, "affection</w>": 28381, "affectionate</w>": 42578, "affects</w>": 17285, "affili": 12120, "affiliate</w>": 18652, "affiliated</w>": 37540, "affiliation</w>": 48377, "affinity</w>": 41451, "affir": 25343, "affirm": 42711, "affirm</w>": 48625, 
"affirmation</w>": 47495, "affl": 34036, "affleck</w>": 35584, "afford": 7951, "afford</w>": 13223, "affordability</w>": 44828, "affordable": 43944, "affordable</w>": 8926, "afg</w>": 33994, "afgh": 9029, "afghan</w>": 15919, "afghanistan</w>": 9836, "afi": 24074, "afi</w>": 31958, "afil": 27209, "afire</w>": 42010, "afirst</w>": 38601, "afl": 15132, "afl</w>": 14356, "aflo": 41959, "afm</w>": 38385, "afootball</w>": 41694, "afor": 43102, "afore": 41468, "afp</w>": 18311, "afraid</w>": 9474, "afri": 13888, "afric": 2136, "africa</w>": 3093, "african": 17471, "african</w>": 4736, "africans</w>": 26534, "afridi</w>": 37651, "afrika</w>": 45833, "afrin</w>": 45586, "afro": 16267, "afro</w>": 21795, "afs</w>": 48960, "aft</w>": 22693, "after": 2278, "after</w>": 953, "afterdark</w>": 48966, "afterlife</w>": 46790, "aftermath</w>": 20958, "afterno": 22330, "afternoon": 39035, "afternoon</w>": 2716, "afternoons</w>": 31631, "afterparty</w>": 35305, "afterwards</w>": 23911, "ag": 602, "ag</w>": 5241, "aga": 1050, "aga</w>": 4654, "again</w>": 1495, "against": 23838, "against</w>": 1601, "agame</w>": 46943, "agan": 42946, "agan</w>": 9178, "agar": 13199, "agar</w>": 17544, "agarwal</w>": 43117, "agas": 20430, "agate</w>": 25454, "agatha</w>": 43896, "agave</w>": 42671, "agawa</w>": 39433, "agazine</w>": 44942, "age": 4758, "age</w>": 805, "aged</w>": 3889, "ageing</w>": 25349, "agen": 10101, "agen</w>": 43696, "agencies</w>": 13887, "agency": 44885, "agency</w>": 6270, "agend": 48653, "agenda</w>": 8728, "agent": 21210, "agent</w>": 6576, "agents</w>": 10199, "agentsof": 37074, "agentsofshield</w>": 38801, "ager": 44847, "ager</w>": 10443, "agers</w>": 22123, "ages</w>": 2321, "agg</w>": 45482, "aggarwal</w>": 39386, "agger": 27836, "aggi": 36844, "aggie": 44244, "aggie</w>": 37618, "aggies</w>": 31047, "aggio</w>": 36685, "aggrav": 35203, "aggre": 10426, "aggreg": 41968, "aggregate</w>": 41318, "aggression</w>": 28900, "aggressive</w>": 16295, "aggressively</w>": 48667, "agh": 17917, "agh</w>": 14402, "aghan</w>": 31276, "agi": 24036, "agi</w>": 17645, "agic</w>": 37652, "agile</w>": 16276, "agility</w>": 32161, "aging</w>": 4336, "agio</w>": 41746, "agirl</w>": 35469, "agle": 37035, "agle</w>": 16702, "agles": 36374, "agles</w>": 22679, "aglia</w>": 46912, "agm</w>": 19162, "agn": 36474, "agna</w>": 43626, "agne": 29374, "agne</w>": 48303, "agnes</w>": 26213, "agno": 41540, "ago": 6276, "ago</w>": 1468, "agomez</w>": 27127, "agon": 26775, "agon</w>": 14901, "agony</w>": 36977, "agor": 38920, "agos</w>": 32657, "agov</w>": 34227, "agp</w>": 46048, "agr": 36639, "agra": 26660, "agra</w>": 29830, "agram</w>": 2447, "agre": 3180, "agreat": 37594, "agree</w>": 5953, "agreed</w>": 12774, "agreeing</w>": 40720, "agreement</w>": 8286, "agreements</w>": 25865, "agrees</w>": 17854, "agri": 20527, "agri</w>": 30326, "agricul": 7234, "agricultural</w>": 15440, "agriculture</w>": 9720, "agro": 33178, "agro</w>": 44589, "agron": 41314, "agroup</w>": 40099, "ags</w>": 16926, "agt</w>": 39681, "agu": 3922, "agu</w>": 36544, "agua</w>": 18482, "aguchi</w>": 49206, "ague</w>": 2095, "aguero</w>": 42964, "agues</w>": 7000, "aguil": 27946, "aguilar</w>": 44715, "ah": 1772, "ah</w>": 1288, "aha": 12082, "aha</w>": 8429, "ahah</w>": 38661, "ahaha</w>": 32423, "ahahaha</w>": 42620, "aham": 36036, "ahan": 45061, "ahan</w>": 19255, "ahar": 31038, "ahar</w>": 38760, "ahe": 27688, "ahead</w>": 3158, "ahem</w>": 39995, "ahh</w>": 13152, "ahhh</w>": 14769, "ahhhh</w>": 21054, "ahhhhh</w>": 36392, "ahi": 45349, "ahi</w>": 
24154, "ahl</w>": 30433, "ahmad": 32167, "ahmad</w>": 16902, "ahmadi": 38656, "ahmadiyya</w>": 44865, "ahmed": 19491, "ahmed</w>": 12081, "ahmedabad</w>": 26966, "ahn</w>": 33405, "aho": 28114, "aho</w>": 38444, "ahora</w>": 43113, "ahouse</w>": 33197, "ahoy</w>": 38652, "ahs</w>": 16937, "ahu": 11908, "ahu</w>": 16515, "ai": 2014, "ai</w>": 2215, "aia</w>": 27046, "aib</w>": 34780, "aic": 29454, "aid": 13723, "aid</w>": 5182, "aida</w>": 33830, "aidan": 48814, "aidan</w>": 26945, "aide": 31558, "aide</w>": 9746, "aided</w>": 48707, "aiden</w>": 40020, "aides</w>": 49082, "aids</w>": 11759, "aig": 27295, "aig</w>": 46989, "aii</w>": 22478, "aik</w>": 42575, "aiken</w>": 46342, "ail": 1457, "ail</w>": 9154, "ailed</w>": 38919, "ailing</w>": 29999, "ails</w>": 27024, "aim": 6787, "aim</w>": 11255, "aime</w>": 39872, "aimed</w>": 20247, "aimee</w>": 36318, "aiming</w>": 21768, "aimo</w>": 36706, "aims</w>": 13326, "ain": 8326, "ain</w>": 2210, "aine": 48983, "aine</w>": 17634, "ains</w>": 27621, "aint": 29543, "aint</w>": 13099, "ainted</w>": 39933, "aioli</w>": 43949, "air": 1281, "air</w>": 1922, "aira": 35085, "aira</w>": 46444, "airasia</w>": 48020, "airbnb</w>": 23098, "airborne</w>": 22755, "airbus</w>": 15324, "aircraft</w>": 7706, "airdrop</w>": 38434, "aire</w>": 7682, "aired</w>": 21938, "aires</w>": 17034, "airfield</w>": 40525, "airforce</w>": 23511, "airing</w>": 20453, "airline</w>": 14847, "airlines</w>": 8929, "airmen</w>": 44499, "airplane</w>": 16451, "airplanes</w>": 33319, "airplay</w>": 47024, "airpollution</w>": 47362, "airport": 48337, "airport</w>": 3259, "airports</w>": 21543, "airs</w>": 18539, "airshow</w>": 27139, "airsoft</w>": 30134, "airspace</w>": 49280, "airstrikes</w>": 37220, "airtel</w>": 34784, "airtime</w>": 46617, "airwaves</w>": 43910, "airways</w>": 14299, "airy</w>": 44453, "ais": 7616, "ais</w>": 11393, "aise</w>": 30505, "aish": 21946, "aisha</w>": 40211, "aishwar": 29687, "aishwarya</w>": 44019, "aisle</w>": 26917, "ait": 25613, "ait</w>": 40814, "aj": 3990, "aj</w>": 6342, "aja": 42343, "aja</w>": 19633, "ajax</w>": 21933, "ajay": 22494, "ajay</w>": 28726, "ajaydevgn</w>": 35515, "aje": 48818, "aje</w>": 33315, "ajes</w>": 38791, "aji": 26102, "aji</w>": 21153, "ajit</w>": 42261, "ajith</w>": 24118, "ajo</w>": 26958, "aju": 36855, "ak": 819, "ak</w>": 1196, "aka": 19154, "aka</w>": 3412, "akaif</w>": 45736, "akan": 43678, "akan</w>": 38244, "akapoor</w>": 40064, "akarta</w>": 48603, "akb</w>": 41962, "akbar</w>": 27180, "ake": 10558, "ake</w>": 5776, "aked</w>": 6115, "aker": 14245, "aker</w>": 3074, "akers</w>": 5788, "akes</w>": 4764, "akest</w>": 46679, "akh": 14821, "akh</w>": 30660, "akhan</w>": 28158, "akhi": 41660, "akhilesh</w>": 48495, "akhtar</w>": 45458, "aki": 18173, "aki</w>": 6592, "akin": 24630, "akin</w>": 13601, "aking</w>": 1809, "akins</w>": 48568, "akira</w>": 34001, "akis</w>": 27732, "akistan</w>": 46221, "akley</w>": 39908, "ako": 44027, "ako</w>": 14541, "akon</w>": 47105, "akos</w>": 44659, "akrish": 37434, "akron</w>": 26115, "aks</w>": 2953, "aksh": 28226, "akshay": 21483, "akshay</w>": 38914, "akshaykumar</w>": 23624, "akshi</w>": 42634, "aku": 18151, "aku</w>": 20815, "aky</w>": 11977, "al": 526, "al</w>": 566, "ala": 12783, "ala</w>": 3449, "alab": 6365, "alabam": 45880, "alabama</w>": 8422, "alach": 24622, "alad": 23074, "aladdin</w>": 29951, "alai</w>": 47072, "alain</w>": 28999, "alam": 16612, "alam</w>": 16012, "alamo": 41922, "alamo</w>": 34632, "alan": 9563, "alan</w>": 5773, "alana</w>": 43405, "aland": 34304, 
"aland</w>": 6819, "alar": 34333, "alarm</w>": 11321, "alarming</w>": 37209, "alarms</w>": 31236, "alarts</w>": 31422, "alas": 7276, "alas</w>": 22412, "alaska</w>": 9562, "alaskan</w>": 33898, "alastair</w>": 42062, "alay": 30289, "alay</w>": 36450, "alaya</w>": 36397, "alb": 45248, "alba</w>": 25254, "alban": 10882, "albania</w>": 29170, "albanian</w>": 47721, "albans</w>": 44119, "albany</w>": 17359, "albat": 42797, "albeit</w>": 38984, "alber": 6413, "albert": 34174, "albert</w>": 9507, "alberta</w>": 11048, "alberto</w>": 22714, "albi": 18512, "albino</w>": 48062, "albion</w>": 24071, "albu": 2216, "album": 40712, "album</w>": 2431, "albums</w>": 10705, "albuquerque</w>": 31079, "alcat": 35361, "alche": 37909, "alchemist</w>": 38913, "alchemy</w>": 39501, "alco": 6848, "alco</w>": 45446, "alcohol</w>": 9426, "alcoholic</w>": 25098, "ald": 4539, "ald</w>": 2928, "alda</w>": 46440, "alde": 33114, "alden": 17155, "alden</w>": 27710, "aldenrichards</w>": 20051, "alder": 18220, "alder</w>": 46571, "aldi</w>": 23204, "aldo</w>": 9933, "aldridge</w>": 38084, "alds</w>": 14285, "aldu": 6505, "aldub": 10532, "aldub</w>": 15247, "ale": 1440, "ale</w>": 1336, "alea</w>": 26518, "aleague</w>": 38909, "alec": 29804, "alec</w>": 19954, "alecoscino</w>": 47948, "aled</w>": 4970, "alee</w>": 24515, "alej": 23440, "alejandro</w>": 32950, "alek": 26906, "alek</w>": 43310, "aleksand": 48429, "alem</w>": 11825, "aleppo</w>": 19258, "aler": 25674, "aler</w>": 27335, "alert</w>": 4662, "alerts</w>": 22144, "ales": 44171, "ales</w>": 5962, "aless": 21864, "alessandro</w>": 37344, "alestine</w>": 31945, "alex": 2959, "alex</w>": 4134, "alexa</w>": 16273, "alexand": 10696, "alexander": 25527, "alexander</w>": 7563, "alexandra</w>": 19054, "alexandre</w>": 35711, "alexandria</w>": 21171, "alexis": 35023, "alexis</w>": 14243, "aley</w>": 21635, "alf</w>": 27098, "alfa</w>": 23482, "alfar": 38870, "alfie</w>": 28598, "alfon": 31947, "alfonso</w>": 41784, "alfre": 20982, "alfred</w>": 16553, "alfredo</w>": 32291, "algae</w>": 25654, "algar": 36291, "algarve</w>": 40290, "alge": 24336, "algebra</w>": 33694, "alger": 18568, "algeria</w>": 25257, "algon": 33007, "algori": 14912, "algorithm</w>": 23295, "algorithms</w>": 26039, "alham": 23352, "alhamdulil": 35129, "alhamdulillah</w>": 38982, "ali": 835, "ali</w>": 3558, "alia</w>": 2492, "aliaa</w>": 36468, "alian</w>": 3464, "alias</w>": 40026, "alibaba</w>": 39231, "alic": 25265, "alice": 23759, "alice</w>": 9192, "alici": 31630, "alicia</w>": 20914, "alie</w>": 8697, "alien": 22846, "alien</w>": 9639, "aliens</w>": 14883, "alier</w>": 39493, "alies</w>": 38086, "alife": 41347, "alife</w>": 21100, "alig": 21272, "alight</w>": 36157, "align</w>": 31160, "aligned</w>": 29292, "alignment</w>": 27267, "alik</w>": 31141, "alike</w>": 12665, "alim": 42075, "alin": 42746, "alin</w>": 40063, "alina</w>": 39529, "aline</w>": 21799, "aling</w>": 5169, "alion</w>": 19049, "alis": 21308, "alis</w>": 20114, "alisa</w>": 38918, "alisation</w>": 42143, "alise</w>": 36718, "alised</w>": 25099, "alism</w>": 5607, "alison": 28653, "alison</w>": 16970, "alist": 44900, "alist</w>": 3320, "alistair</w>": 40551, "alistic</w>": 22302, "alists</w>": 5653, "alit": 45566, "alities</w>": 27925, "ality</w>": 1694, "alive": 40467, "alive</w>": 4716, "aliz": 30979, "alization</w>": 8026, "alize</w>": 10268, "alized</w>": 6141, "alizer</w>": 38922, "alizes</w>": 26181, "alizing</w>": 13023, "alk": 30246, "alk</w>": 21577, "alkal": 33450, "alkaline</w>": 39210, "all": 813, "all</w>": 615, "alla": 
13884, "alla</w>": 14000, "allabout": 43996, "allah</w>": 6378, "allan": 36552, "allan</w>": 15404, "allblacks</w>": 47728, "allday</w>": 35862, "alle": 4870, "alle</w>": 29478, "alled</w>": 7379, "alleg": 7456, "allegations</w>": 16992, "alleged</w>": 12133, "allegedly</w>": 14177, "alleges</w>": 45051, "allegh": 41479, "allegheny</w>": 47851, "allegi": 28832, "allegiance</w>": 30955, "allen": 16712, "allen</w>": 6386, "allenge</w>": 31387, "aller": 10116, "aller</w>": 30630, "allergic</w>": 28809, "allergies</w>": 28247, "allergy</w>": 24408, "allery</w>": 32542, "alles</w>": 43354, "allevi": 31682, "alleviate</w>": 44799, "alley": 36205, "alley</w>": 10329, "allez</w>": 49137, "alli": 4123, "alli</w>": 15268, "alliance": 45404, "alliance</w>": 8945, "alliances</w>": 48403, "allianz</w>": 45740, "allie</w>": 25040, "allied</w>": 20045, "allies</w>": 17277, "alligator</w>": 28574, "allin": 45007, "allin</w>": 22395, "alline</w>": 48182, "alling</w>": 2992, "allis</w>": 45309, "allison": 34602, "allison</w>": 16578, "allman</w>": 42611, "allo": 8107, "allo</w>": 18389, "allocated</w>": 42716, "allocation</w>": 35139, "allon</w>": 46693, "allot": 26363, "allotment</w>": 33750, "allow": 5645, "allow</w>": 6722, "allowance</w>": 35696, "allowed</w>": 7885, "allowing</w>": 12458, "allows</w>": 9966, "alloy</w>": 22467, "alls</w>": 1997, "allstar": 31247, "allstar</w>": 22974, "allstars</w>": 31198, "allthe": 29253, "allu": 20157, "alluarjun</w>": 39333, "allure</w>": 41814, "ally": 7461, "ally</w>": 769, "alm</w>": 28303, "alma": 32933, "alma</w>": 18337, "alman": 29394, "almanac</w>": 41268, "almighty</w>": 21898, "almond</w>": 15646, "almonds</w>": 30468, "almost": 47534, "almost</w>": 2671, "aln": 47203, "alo": 3435, "alo</w>": 6183, "aloe</w>": 30728, "alog</w>": 15813, "alogue</w>": 9101, "aloha</w>": 23160, "aloils</w>": 49002, "alom</w>": 22236, "alon": 14097, "alon</w>": 42846, "alone</w>": 4702, "along": 8300, "along</w>": 2528, "alongside</w>": 8646, "alonso</w>": 25704, "aloo</w>": 46187, "alore</w>": 14323, "alot</w>": 16945, "alou": 43180, "aloud</w>": 30028, "alove": 46669, "alove</w>": 37045, "alp": 32020, "alp</w>": 39342, "alpac": 30128, "alpaca</w>": 42561, "alph": 6720, "alpha": 11807, "alpha</w>": 8624, "alphabe": 45796, "alphabet</w>": 22335, "alphon": 37865, "alpine</w>": 17055, "alps</w>": 18191, "already</w>": 2426, "alright</w>": 10866, "als": 23982, "als</w>": 938, "alsace</w>": 49388, "also</w>": 1446, "alt": 9995, "alt</w>": 10006, "alta": 24470, "alta</w>": 25378, "altaf</w>": 47342, "altam": 45624, "altar</w>": 16385, "alter": 4949, "alter</w>": 21393, "altered</w>": 25201, "altern": 47463, "alternate</w>": 15926, "alternati": 16699, "alternative": 37327, "alternative</w>": 8248, "alternatives</w>": 25041, "alth": 23463, "alth</w>": 5863, "although</w>": 9421, "alti": 35531, "alties</w>": 17276, "altitude</w>": 23241, "altman</w>": 48100, "alto": 35053, "alto</w>": 17518, "altogether</w>": 45689, "alton": 41331, "alton</w>": 36550, "altrin": 38458, "altrincham</w>": 44718, "alty</w>": 5546, "alu": 4776, "alu</w>": 27991, "alum": 5404, "alum</w>": 10553, "alumin": 14563, "alumini": 22908, "aluminium</w>": 23631, "aluminum</w>": 15251, "alumna</w>": 30313, "alumni</w>": 6646, "alumnus</w>": 23633, "alums</w>": 30155, "alv": 20928, "alvar": 25196, "alvarez</w>": 26924, "alvaro</w>": 41941, "alves</w>": 38547, "alvin</w>": 27023, "alway": 14046, "alway</w>": 43764, "always": 24997, "always</w>": 1466, "alwx</w>": 32768, "aly": 6468, "aly</w>": 12910, "alyn": 49150, 
"alyss": 29490, "alyssa</w>": 18898, "alz": 12936, "alz</w>": 41128, "alzheim": 15212, "alzheimer</w>": 21151, "alzheimers</w>": 34592, "am": 548, "am</w>": 687, "ama": 18206, "ama</w>": 1696, "amad": 45095, "amade": 37366, "amag": 32049, "amal": 15315, "amal</w>": 36753, "aman": 19890, "aman</w>": 10110, "amand": 14560, "amanda</w>": 10036, "amar": 6424, "amar</w>": 19607, "amara</w>": 48522, "amari": 42565, "amarillo</w>": 40449, "amarine</w>": 45591, "amarketing</w>": 30788, "amas": 22716, "amas</w>": 15667, "amat": 38664, "amat</w>": 25455, "amate": 12453, "amateur</w>": 14287, "amaya</w>": 47210, "amaz": 1185, "amaze</w>": 24846, "amazed</w>": 18944, "amazing": 15949, "amazing</w>": 1370, "amazingly</w>": 20368, "amazon": 13630, "amazon</w>": 4140, "amb": 9042, "amb</w>": 16853, "amba</w>": 27003, "ambani</w>": 45967, "ambas": 5634, "ambassad": 5758, "ambassador</w>": 6795, "ambassadors</w>": 16832, "ambed": 42089, "ambedkar</w>": 48131, "amber": 18292, "amber</w>": 9986, "ambi": 11844, "ambient</w>": 23447, "ambigu": 35702, "ambition</w>": 20673, "ambitions</w>": 34152, "ambitious</w>": 18666, "ambro": 17585, "ambrose</w>": 24253, "ambu": 34423, "ambul": 13944, "ambulance</w>": 15555, "ambush</w>": 40725, "amc": 24942, "amc</w>": 16921, "amd</w>": 20845, "ame": 3995, "ame</w>": 780, "amed</w>": 5660, "ameen</w>": 24229, "amel": 31988, "amel</w>": 10960, "ameli": 21599, "amelia</w>": 21433, "amell</w>": 48198, "amen": 18716, "amen</w>": 12335, "amend": 12425, "amendment</w>": 15019, "amendments</w>": 40901, "amenities</w>": 30096, "ament</w>": 27528, "amer": 17081, "amer</w>": 16147, "ameri": 40422, "americ": 1283, "america</w>": 2224, "americafirst</w>": 43216, "american": 8746, "american</w>": 2151, "americana</w>": 26221, "americanair</w>": 42538, "americani": 39726, "americans</w>": 6676, "americas": 33343, "americas</w>": 18142, "ames</w>": 5469, "ameter</w>": 23393, "amethy": 30291, "amethyst</w>": 31485, "amex</w>": 46390, "amg</w>": 21324, "amher": 32311, "amherst</w>": 39065, "ami": 6100, "ami</w>": 3065, "amic": 25824, "amic</w>": 21383, "amid": 18908, "amid</w>": 11953, "amide</w>": 30952, "amidst</w>": 25172, "amie</w>": 36901, "amig": 40294, "amiga</w>": 35329, "amigo</w>": 44991, "amigos</w>": 28176, "amii": 35462, "amiibo</w>": 38871, "amily</w>": 36732, "amin": 14337, "amin</w>": 20235, "amina</w>": 47531, "amination</w>": 30355, "amine</w>": 35823, "aming</w>": 3507, "amino</w>": 33464, "amir": 26029, "amir</w>": 21973, "amis</w>": 29829, "amish</w>": 24958, "amit": 15083, "amit</w>": 25255, "amitabh</w>": 48124, "amitshah</w>": 32374, "aml</w>": 43185, "amma</w>": 29786, "amman</w>": 29243, "ammo</w>": 33474, "ammunition</w>": 35060, "amn</w>": 24073, "amne": 14596, "amnesia</w>": 41741, "amnesty": 46330, "amnesty</w>": 21177, "amo": 4833, "amo</w>": 11156, "amodi</w>": 9826, "amon": 17492, "amon</w>": 24046, "among": 12310, "among</w>": 4265, "amongst</w>": 12520, "amoo</w>": 26977, "amor": 19977, "amor</w>": 15973, "amore": 38937, "amore</w>": 22691, "amores</w>": 36338, "amos</w>": 18133, "amoto</w>": 25492, "amount</w>": 6403, "amounts</w>": 16747, "amour</w>": 29908, "amovie</w>": 41062, "amp": 3521, "amp</w>": 6259, "amped</w>": 22640, "amphi": 16379, "amphibious</w>": 45206, "amphitheater</w>": 41285, "amphitheatre</w>": 44039, "ample</w>": 34162, "amples</w>": 14536, "ampli": 15647, "amplifier</w>": 31743, "amplify</w>": 45308, "amps</w>": 19252, "ampton": 29410, "ampton</w>": 9347, "amr</w>": 30916, "amreading</w>": 16546, "amrit": 33849, "ams</w>": 1396, 
"amster": 9110, "amsterdam</w>": 9441, "amtrak</w>": 27855, "amu": 11347, "amu</w>": 32336, "amur": 35014, "amura</w>": 35487, "amus</w>": 36269, "amuse": 21421, "amuse</w>": 44367, "amused</w>": 30212, "amusement</w>": 32570, "amusic</w>": 20266, "amusing</w>": 31789, "amwriting</w>": 9660, "amy": 10547, "amy</w>": 5187, "an": 514, "an</w>": 550, "ana": 6588, "ana</w>": 1388, "anab": 34742, "anada</w>": 27948, "anag": 12115, "anagh</w>": 40774, "anaheim</w>": 23728, "anak": 34814, "anak</w>": 38658, "anal": 2785, "analo": 34179, "analog</w>": 19963, "analogue</w>": 46031, "analy": 4611, "analyse</w>": 47246, "analyses</w>": 39695, "analysis</w>": 5296, "analyst</w>": 14198, "analysts</w>": 28075, "analytical</w>": 34550, "analytics</w>": 8558, "analyze</w>": 28519, "analyzing</w>": 32107, "anam": 29525, "anan": 37215, "anand": 25073, "anand</w>": 22083, "anap": 41566, "anarch": 46405, "anarchi": 39879, "anarchy</w>": 27707, "anas": 31382, "anas</w>": 12633, "anast": 48902, "anasta": 22915, "anastasi": 36534, "anastasia</w>": 37975, "anat": 10045, "anath</w>": 31277, "anatom": 33759, "anatomy</w>": 15376, "anc": 1124, "anc</w>": 17758, "anca</w>": 14583, "ance": 7165, "ance</w>": 884, "anced</w>": 5071, "ancer</w>": 17415, "ancers</w>": 37296, "ances</w>": 3515, "ancestor</w>": 43904, "ancestors</w>": 24405, "ancestral</w>": 41615, "ancestry</w>": 30922, "anch": 9489, "anche</w>": 34679, "ancho": 26610, "anchor": 20030, "anchor</w>": 13201, "anchorage</w>": 31950, "anchored</w>": 45926, "anchors</w>": 37830, "anci": 4192, "ancient": 31495, "ancient</w>": 5810, "ancies</w>": 21647, "ancing</w>": 7797, "anco</w>": 15459, "ancy": 16282, "ancy</w>": 3633, "and": 672, "and</w>": 537, "anda</w>": 2911, "andalu": 31443, "andco</w>": 36302, "ande": 26889, "ande</w>": 30354, "ander": 3740, "ander</w>": 3935, "anders</w>": 10880, "andersen</w>": 32661, "anderson": 26683, "anderson</w>": 6510, "andes</w>": 24052, "andfriends</w>": 36871, "andhi</w>": 21617, "andhra</w>": 32452, "andi": 28870, "andi</w>": 14354, "andie</w>": 46318, "andme</w>": 42831, "ando": 35950, "ando</w>": 5986, "andolan</w>": 48965, "andon</w>": 36488, "andor": 45243, "andover</w>": 44177, "andr": 22661, "andra": 46795, "andra</w>": 21730, "andre": 2657, "andre</w>": 9400, "andrea</w>": 10895, "andreas</w>": 20444, "andrei</w>": 42137, "andres</w>": 25197, "andretti</w>": 44291, "andrew": 11717, "andrew</w>": 4847, "andrews</w>": 14506, "andri": 37208, "andro": 4417, "andro</w>": 17980, "android": 24284, "android</w>": 5191, "androidgames</w>": 46572, "andromeda</w>": 42942, "andré</w>": 35609, "ands</w>": 32257, "andthe": 22111, "andu": 44200, "andum</w>": 47266, "andy": 9447, "andy</w>": 2888, "ane": 5846, "ane</w>": 3051, "anec": 33965, "anem": 41395, "anemone</w>": 49019, "aneous</w>": 48273, "anes</w>": 15381, "anese</w>": 48778, "anesthe": 30622, "anesthesia</w>": 43353, "anew": 39084, "anew</w>": 47341, "anews</w>": 20919, "aney</w>": 22387, "anfield</w>": 26993, "ang": 883, "ang</w>": 2704, "anga</w>": 11641, "angames</w>": 43178, "angan</w>": 28264, "angas</w>": 46180, "ange": 2960, "ange</w>": 3039, "angel": 5029, "angel</w>": 5130, "angela</w>": 12354, "angeles</w>": 7382, "angeli": 15265, "angelic</w>": 41038, "angelica</w>": 38582, "angelina</w>": 28890, "angelo</w>": 14342, "angelou</w>": 41328, "angels</w>": 7809, "anger": 32737, "anger</w>": 6788, "angerous</w>": 39716, "angers</w>": 29756, "angh": 34030, "angi": 28003, "angi</w>": 24301, "angie</w>": 18859, "angle": 21749, "angle</w>": 6946, "angled</w>": 32322, 
"angler</w>": 22284, "anglers</w>": 41608, "angles</w>": 18627, "anglesey</w>": 31850, "anglia</w>": 32076, "anglic": 28322, "anglican</w>": 33284, "angling</w>": 36824, "anglo": 39515, "anglo</w>": 30408, "ango</w>": 19090, "angola</w>": 36636, "angor</w>": 41740, "angp</w>": 19992, "angry": 33910, "angry</w>": 9054, "angs</w>": 18441, "angst</w>": 41714, "angu": 11209, "angular": 43584, "angular</w>": 24981, "angularjs</w>": 48608, "angus</w>": 19688, "ani": 1326, "ani</w>": 3624, "ania</w>": 9866, "anian</w>": 9945, "anians</w>": 39393, "anic</w>": 23113, "anie": 26697, "anie</w>": 7671, "anil": 28589, "anil</w>": 34619, "anim": 2190, "animal": 10697, "animal</w>": 4668, "animalrights</w>": 42859, "animals</w>": 4995, "animate</w>": 40076, "animated</w>": 13360, "animation</w>": 10344, "animations</w>": 42870, "animator</w>": 42591, "anime": 23314, "anime</w>": 6469, "anin</w>": 45735, "aning</w>": 30972, "anir": 27089, "anirud": 35278, "anirudhofficial</w>": 45917, "anis": 40986, "anis</w>": 47556, "anism</w>": 20947, "anist</w>": 16729, "anistan</w>": 9727, "aniston</w>": 47344, "anit": 23683, "anita</w>": 18544, "anium</w>": 14794, "anj": 22443, "anja</w>": 43440, "anjali</w>": 38834, "anjo</w>": 47353, "ank": 13339, "ank</w>": 10029, "anka</w>": 45324, "ankara</w>": 34309, "ankle</w>": 14777, "ankles</w>": 48688, "ann": 850, "ann</w>": 5424, "anna": 13821, "anna</w>": 2160, "annab": 22336, "annabelle</w>": 47661, "annah": 39166, "annah</w>": 14327, "annak": 41720, "annan</w>": 32166, "annapolis</w>": 34491, "annas": 48467, "anne": 9139, "anne</w>": 4083, "anned</w>": 27352, "anner</w>": 12642, "annes</w>": 24343, "annette</w>": 36821, "annex": 42958, "annex</w>": 46389, "anni": 2438, "anni</w>": 13728, "annie": 37270, "annie</w>": 12173, "annies</w>": 43184, "annihil": 32734, "annis</w>": 24742, "anniv</w>": 31399, "anniver": 29671, "annivers": 42836, "anniversaire</w>": 30882, "anniversary</w>": 3048, "anno": 9901, "anno</w>": 26871, "annon</w>": 26385, "annot": 30411, "announ": 1806, "announce</w>": 3682, "announced</w>": 4103, "announcement</w>": 6932, "announcements</w>": 23735, "announcer</w>": 33626, "announces</w>": 6500, "announcing</w>": 11593, "annoy</w>": 45138, "annoyed</w>": 29863, "annoying</w>": 15248, "annu": 21698, "annual</w>": 2906, "annually</w>": 23703, "anny": 34313, "anny</w>": 5291, "ano": 5617, "ano</w>": 2658, "anom": 21612, "anomaly</w>": 46811, "anon": 47079, "anon</w>": 13667, "anonym": 38605, "anonymous</w>": 15036, "anoo": 25690, "anor": 13243, "anor</w>": 16596, "anos</w>": 20132, "another": 29274, "another</w>": 1380, "anova</w>": 24116, "ans": 24586, "ans</w>": 885, "ansari</w>": 40748, "ansel": 40356, "answ": 3369, "answe": 14391, "answer</w>": 4518, "answered</w>": 14499, "answering</w>": 18280, "answers</w>": 8692, "ant": 1103, "ant</w>": 773, "anta</w>": 3023, "antag": 41745, "antal": 39355, "antalya</w>": 47440, "antan": 32899, "antarc": 21338, "antarctic</w>": 27077, "antarctica</w>": 22587, "ante": 19311, "ante</w>": 9769, "antebellum</w>": 41683, "antelope</w>": 39177, "anten": 35517, "antenna</w>": 26370, "anter": 46508, "antes</w>": 14927, "antgrasso</w>": 39074, "anth": 3737, "anth</w>": 29741, "antha</w>": 47981, "anthe": 34167, "anthem</w>": 12504, "anthi</w>": 45261, "anthology</w>": 21009, "anthony": 17477, "anthony</w>": 6113, "anthro": 10019, "anthropo": 18538, "anthropology</w>": 32407, "anthus</w>": 37639, "anti": 3120, "anti</w>": 3564, "antibio": 18954, "antibiotic</w>": 34387, "antibiotics</w>": 29499, "antibody</w>": 49018, 
"antic": 8260, "anticip": 11435, "anticipate</w>": 38280, "anticipated</w>": 18605, "anticipating</w>": 48067, "anticipation</w>": 26983, "antics</w>": 37126, "antidote</w>": 45476, "antifa</w>": 35926, "antigua</w>": 39910, "antine</w>": 17641, "antino</w>": 27818, "antioxid": 23010, "antioxidant</w>": 37452, "antioxidants</w>": 34208, "antiqu": 21745, "antique": 46517, "antique</w>": 9060, "antiques</w>": 17365, "antis</w>": 19748, "antisemitism</w>": 36630, "antit": 37833, "antitrust</w>": 49343, "antlers</w>": 47720, "antly</w>": 5265, "anto": 16826, "anto</w>": 24486, "antoine</w>": 25188, "anton": 5497, "anton</w>": 19644, "antoni": 39958, "antonio": 30497, "antonio</w>": 7842, "antony</w>": 30707, "antrim</w>": 40252, "ants</w>": 1589, "antv</w>": 47520, "antw": 44460, "antwer": 26970, "antwerp</w>": 33797, "antz</w>": 25684, "anu": 8537, "anu</w>": 17152, "anup": 29617, "anus</w>": 27084, "anush": 22765, "anushka": 42080, "anushka</w>": 39822, "anushkasharma</w>": 44203, "anwar</w>": 34261, "anxi": 9021, "anxiety</w>": 11103, "anxious</w>": 27793, "any": 1307, "any</w>": 1504, "anya</w>": 11173, "anybody</w>": 10071, "anyi</w>": 41632, "anymore</w>": 7372, "anyone</w>": 2302, "anything</w>": 3582, "anytime</w>": 13924, "anyway</w>": 8931, "anyways</w>": 19778, "anywhere</w>": 8863, "anz": 14445, "anz</w>": 19425, "anza</w>": 14669, "anzac</w>": 31977, "ao": 7313, "ao</w>": 5703, "aoa</w>": 47119, "aoc</w>": 31918, "aofficial</w>": 30840, "aoki</w>": 33602, "aol</w>": 40643, "aon": 30928, "aon</w>": 48476, "aor": 32044, "aos</w>": 46860, "ap": 688, "ap</w>": 2728, "apa": 36954, "apa</w>": 13537, "apac</w>": 34320, "apache</w>": 23921, "apal": 38017, "apan</w>": 36562, "apar": 9161, "apark</w>": 32528, "apart": 6474, "apart</w>": 7803, "aparthe": 25121, "apartheid</w>": 26597, "apartment</w>": 8285, "apartments</w>": 15791, "aparty</w>": 26767, "apat": 31755, "apathy</w>": 18145, "apc</w>": 20300, "apd</w>": 44563, "ape": 6098, "ape</w>": 2609, "apec</w>": 47530, "aper": 13681, "aper</w>": 5858, "apers</w>": 15846, "apes</w>": 9550, "apeu": 19040, "apex": 41935, "apex</w>": 23712, "aph": 16341, "aph</w>": 29491, "apha</w>": 47104, "apho": 21758, "aphra": 44147, "api": 23342, "api</w>": 14674, "apia</w>": 44259, "apic": 40679, "aping</w>": 18456, "apink</w>": 35725, "apis</w>": 37575, "apk</w>": 27648, "apo": 4089, "apo</w>": 19758, "apocaly": 13932, "apocalypse</w>": 17571, "apocalyptic</w>": 35675, "apol": 5023, "apolice</w>": 45663, "apolis</w>": 9598, "apollo": 48213, "apollo</w>": 11554, "apolo": 31094, "apolog": 25530, "apologe": 42908, "apologi": 14977, "apologies</w>": 21959, "apologise</w>": 39608, "apologize</w>": 22879, "apologizes</w>": 35298, "apology</w>": 20768, "apor": 21871, "apore</w>": 6679, "apost": 20309, "apostle</w>": 33051, "apostles</w>": 48457, "app": 882, "app</w>": 2231, "appa": 4884, "appa</w>": 13110, "appalach": 30523, "appalachian</w>": 36806, "appalling</w>": 44797, "appar": 26698, "apparatus</w>": 37716, "apparel</w>": 13972, "apparent</w>": 23963, "apparently</w>": 5287, "appe": 3748, "appe</w>": 45949, "appeal</w>": 9625, "appealing</w>": 25909, "appeals</w>": 22447, "appear": 5544, "appear</w>": 9308, "appearance</w>": 7238, "appearances</w>": 17214, "appeared</w>": 11561, "appearing</w>": 18759, "appears</w>": 8743, "appell": 43833, "appen": 37201, "appen</w>": 26589, "apper</w>": 18780, "appet": 21686, "appeti": 24179, "appetite</w>": 24481, "appetizer</w>": 36065, "applau": 24713, "applaud</w>": 42152, "applause</w>": 22650, "apple": 8629, 
"apple</w>": 3055, "applemusic</w>": 21390, "apples</w>": 14032, "appleton</w>": 45250, "appli": 15495, "appliance</w>": 33677, "appliances</w>": 22134, "applic": 4235, "applicable</w>": 37927, "applicants</w>": 28035, "application</w>": 7241, "applications</w>": 7341, "applied</w>": 12636, "applies</w>": 24910, "apply</w>": 4356, "applying</w>": 17965, "appo": 5433, "appoint</w>": 36190, "appointed</w>": 11087, "appointment</w>": 10890, "appointments</w>": 23439, "appoints</w>": 25132, "apprais": 36972, "appraisal</w>": 46108, "appreci": 3474, "appreciate</w>": 6263, "appreciated</w>": 9264, "appreciates</w>": 36573, "appreciating</w>": 39352, "appreciation</w>": 9212, "appreciationday</w>": 37438, "appreciative</w>": 45074, "appren": 10582, "apprentic": 15662, "apprentice": 19122, "apprentice</w>": 17985, "apprentices</w>": 38252, "apprenticeship</w>": 26939, "apprenticeships</w>": 35425, "appro": 2398, "approach": 7781, "approach</w>": 6241, "approached</w>": 36499, "approaches</w>": 14962, "approaching</w>": 12164, "appropri": 8446, "appropriate</w>": 10768, "appropriately</w>": 30383, "appropriation</w>": 49110, "approval</w>": 13549, "approve</w>": 19064, "approved</w>": 9412, "approves</w>": 18107, "approx</w>": 18266, "approxim": 14201, "approximately</w>": 16128, "apps</w>": 7020, "appstore</w>": 31377, "appt</w>": 48112, "appy</w>": 34420, "apr": 39396, "apr</w>": 11177, "apra": 37027, "apric": 25923, "apricot</w>": 30815, "april": 23548, "april</w>": 2484, "apro": 42712, "apro</w>": 49051, "apron</w>": 29502, "aps</w>": 8868, "apse</w>": 31843, "apt</w>": 17921, "aptly</w>": 47313, "apu": 22166, "apur": 36900, "apur</w>": 45193, "aq": 14018, "aq</w>": 26862, "aqu": 4458, "aqua</w>": 18613, "aquaculture</w>": 41885, "aquaman</w>": 35098, "aquari": 37605, "aquarium</w>": 16814, "aquarius</w>": 38879, "aquatic</w>": 22658, "aque": 35927, "aque</w>": 37268, "aqui": 36826, "aquino</w>": 33796, "ar": 516, "ar</w>": 625, "ara": 24161, "ara</w>": 3340, "arab": 5405, "arab</w>": 12028, "arabia</w>": 11746, "arabian</w>": 24663, "arabic</w>": 16709, "arabs</w>": 39155, "arac": 47620, "arach": 37689, "arag": 41502, "araj</w>": 45142, "arak": 23416, "aram": 19223, "aram</w>": 21473, "arama</w>": 49066, "aran": 20839, "aran</w>": 19641, "aras</w>": 36399, "arat</w>": 30856, "arav": 35836, "arbit": 20267, "arbitr": 22702, "arbitration</w>": 34845, "arbor": 33516, "arbor</w>": 24878, "arboretum</w>": 41719, "arc": 4997, "arc</w>": 11592, "arca": 25189, "arca</w>": 37612, "arcade</w>": 13331, "arcadia</w>": 38372, "arch": 2458, "arch</w>": 8557, "archa": 45619, "archae": 10121, "archaeological</w>": 26163, "archaeologists</w>": 45035, "archaeology</w>": 14868, "archan": 33359, "archbishop</w>": 23994, "arche": 22474, "archer</w>": 21824, "archers</w>": 38407, "archery</w>": 23935, "arches</w>": 30771, "archi": 4479, "archie</w>": 20557, "archipel": 39750, "archipelago</w>": 43025, "architec": 3359, "architect</w>": 12192, "architects</w>": 13290, "architectural</w>": 15360, "architecture": 39038, "architecture</w>": 4920, "archival</w>": 39249, "archive": 42257, "archive</w>": 10548, "archived</w>": 42379, "archives</w>": 9411, "archy</w>": 15643, "arctic": 29716, "arctic</w>": 9138, "ard": 3793, "ard</w>": 746, "arden": 44600, "arden</w>": 27057, "ardi": 23932, "ardi</w>": 19837, "ardo": 35735, "ardo</w>": 9394, "ards</w>": 1654, "ardu": 20906, "arduino</w>": 25398, "are": 1076, "are</w>": 631, "area</w>": 2445, "areas</w>": 5429, "arec": 18136, "areclipse</w>": 36030, "ared</w>": 5369, 
"arel</w>": 12798, "arella</w>": 24784, "arelli</w>": 48619, "aren": 4033, "aren</w>": 4318, "arena</w>": 5463, "arenas</w>": 47860, "arent</w>": 37487, "arer</w>": 14857, "arers</w>": 33159, "ares</w>": 12224, "arest</w>": 11708, "aret</w>": 22247, "areth</w>": 47725, "aretha</w>": 42090, "areyou": 37607, "arez</w>": 13108, "arg</w>": 27285, "argent": 7812, "argentina</w>": 9789, "argentine</w>": 32582, "argon": 40737, "argos</w>": 37443, "argu": 7440, "arguably</w>": 30899, "argue</w>": 19788, "argued</w>": 48153, "argues</w>": 30045, "arguing</w>": 26549, "argument</w>": 16224, "arguments</w>": 24693, "argus</w>": 44300, "argy": 21066, "argyle</w>": 36179, "argyll</w>": 40667, "ari": 1221, "ari</w>": 3681, "aria</w>": 8883, "arial</w>": 42431, "arian": 29980, "arian</w>": 6953, "ariana</w>": 14892, "arianag": 23025, "arianagrande</w>": 23321, "arianism</w>": 44351, "arians</w>": 19104, "arias</w>": 22567, "arie</w>": 18774, "ariel": 47959, "ariel</w>": 21025, "aries</w>": 5213, "arif</w>": 46621, "arily</w>": 12993, "arin": 29564, "arin</w>": 18612, "arina</w>": 29271, "arine</w>": 29586, "aring</w>": 2142, "ario</w>": 8862, "arios</w>": 25392, "aris</w>": 15227, "arise</w>": 26490, "arist</w>": 12110, "aristo": 25666, "aristotle</w>": 49156, "arities</w>": 31069, "arity</w>": 16608, "arium</w>": 11809, "arius</w>": 21482, "ariz": 6516, "arized</w>": 40167, "arizon": 28936, "arizona</w>": 7106, "arjun": 24565, "arjun</w>": 20477, "arjuna</w>": 43835, "ark": 11921, "ark</w>": 12010, "arkansas</w>": 12227, "arkham</w>": 36381, "arl</w>": 48542, "arlington": 44940, "arlington</w>": 17865, "arly</w>": 3637, "arm": 5671, "arm</w>": 4793, "arma": 15887, "arma</w>": 38716, "armad": 37897, "armada</w>": 34938, "armagh</w>": 44313, "armani</w>": 31314, "armb": 37096, "armchair</w>": 45757, "armed": 40471, "armed</w>": 8202, "armen": 13145, "armenia</w>": 22008, "armenian</w>": 24891, "armies</w>": 46686, "armin": 45481, "arming</w>": 19766, "armist": 38150, "armistice</w>": 46765, "armor</w>": 16167, "armored</w>": 28214, "armory</w>": 38610, "armour</w>": 18503, "armoured</w>": 42514, "arms</w>": 5706, "armstrong</w>": 15005, "army": 13541, "army</w>": 3133, "armys</w>": 27311, "arn": 9348, "arn</w>": 37597, "arnau": 45556, "arne</w>": 43509, "arney</w>": 35962, "arnold": 49096, "arnold</w>": 13609, "arns</w>": 46692, "aro": 7514, "aro</w>": 11551, "aroa</w>": 48209, "arom": 16831, "aroma": 40143, "aroma</w>": 26390, "aromas</w>": 47439, "aromatherapy</w>": 42584, "aromatic</w>": 39669, "aron": 30855, "aron</w>": 28926, "aroo</w>": 47581, "arora</w>": 31897, "arosa</w>": 44264, "arose</w>": 44262, "around": 35615, "around</w>": 1630, "arqu": 35654, "arquitec": 41703, "arr": 39106, "arr</w>": 42489, "arra": 32918, "arra</w>": 43827, "arrahman</w>": 44554, "arran</w>": 45722, "arrang": 16711, "arrange": 15410, "arrange</w>": 26311, "arranged</w>": 22451, "arrangement</w>": 23822, "arrangements</w>": 23792, "arranging</w>": 35321, "array</w>": 17293, "arre": 4374, "arrell</w>": 28846, "arrest</w>": 9320, "arrested</w>": 5845, "arresting</w>": 43930, "arrests</w>": 20683, "arri": 2115, "arrival</w>": 9073, "arrivals</w>": 19583, "arrive</w>": 8851, "arrived</w>": 3514, "arrives</w>": 9905, "arriving</w>": 10884, "arro": 15729, "arrog": 26997, "arrogance</w>": 47025, "arrogant</w>": 40582, "arrow": 30920, "arrow</w>": 11149, "arrowhead</w>": 46393, "arrows</w>": 24768, "arroyo</w>": 45237, "ars": 42815, "ars</w>": 864, "arse</w>": 22665, "arsen": 5330, "arsenal": 45234, "arsenal</w>": 6084, 
"arsene</w>": 32117, "arson</w>": 29937, "art": 1486, "art</w>": 794, "arta</w>": 12031, "arte": 13482, "arte</w>": 12947, "artem": 40387, "artemis</w>": 45256, "arten</w>": 37043, "arter": 29449, "artery</w>": 40062, "artes</w>": 48629, "artforsale</w>": 48239, "artgallery</w>": 31982, "arth": 7146, "arth</w>": 20265, "arthistory</w>": 39313, "arthr": 20807, "arthritis</w>": 22916, "arthro": 43255, "arthur": 35660, "arthur</w>": 8550, "arti": 1635, "arti</w>": 34601, "artic": 3003, "articho": 30937, "artichoke</w>": 39647, "article</w>": 3550, "articles</w>": 11939, "articul": 40343, "articulate</w>": 45444, "artif": 8950, "artifact</w>": 37718, "artifacts</w>": 30249, "artificial": 19357, "artificial</w>": 12040, "artificialintelligence</w>": 20799, "artillery</w>": 24465, "artin": 33168, "artin</w>": 48540, "artis</w>": 41794, "artisan": 36389, "artisan</w>": 21535, "artisans</w>": 40140, "artist": 14326, "artist</w>": 2456, "artiste</w>": 41402, "artistic</w>": 12421, "artiston": 48443, "artistry</w>": 38570, "artists</w>": 4899, "artistson": 32127, "artistsontwitter</w>": 39469, "artlovers</w>": 35617, "arto": 28464, "artof": 31751, "artoftheday</w>": 43990, "arton</w>": 46744, "arts": 22040, "arts</w>": 3812, "artsy</w>": 31588, "arturo</w>": 38591, "artwit</w>": 36713, "artwork</w>": 4188, "artworks</w>": 26215, "arty": 45417, "arty</w>": 25916, "aru": 13757, "aru</w>": 23907, "aruba</w>": 40131, "arugula</w>": 40770, "arum</w>": 48732, "arun": 16105, "arun</w>": 31877, "arunach": 47260, "arunjaitley</w>": 44874, "arus</w>": 22644, "arvin": 16971, "arvind": 21209, "arvind</w>": 41079, "arvindkejriwal</w>": 22971, "arvo</w>": 45726, "arwx</w>": 29824, "ary": 4617, "ary</w>": 856, "arya</w>": 23594, "aryan</w>": 34966, "as": 587, "as</w>": 601, "asa": 39676, "asa</w>": 11914, "asad</w>": 42376, "asaki</w>": 22455, "asam": 40603, "asan": 22379, "asan</w>": 17841, "asana</w>": 42363, "asant</w>": 25536, "asants</w>": 37766, "asap": 24199, "asap</w>": 10822, "asar": 24733, "asar</w>": 49299, "asb</w>": 31186, "asbe": 32113, "asbestos</w>": 33765, "asc": 22720, "asc</w>": 23305, "ascen": 20767, "ascension</w>": 35499, "ascent</w>": 36625, "asci": 12753, "asco": 25578, "asco</w>": 17488, "ascot</w>": 23723, "ascri": 15506, "asd</w>": 36988, "asda</w>": 29391, "asdf": 36857, "asdfghj": 42758, "asdfghjkl</w>": 47660, "ase": 8083, "ase</w>": 894, "asean</w>": 24472, "aseball</w>": 46903, "ased</w>": 2134, "asen": 41085, "aser": 39615, "aser</w>": 7209, "ases</w>": 3762, "asf</w>": 25863, "asg</w>": 34813, "ash": 2067, "ash</w>": 2612, "asha": 40572, "asha</w>": 13472, "ashamed</w>": 20633, "ashby</w>": 46531, "ashe": 48523, "ashe</w>": 31752, "asher</w>": 37585, "ashes</w>": 12587, "asheville</w>": 28897, "ashford</w>": 37796, "ashi": 15563, "ashi</w>": 15934, "ashish</w>": 33145, "ashland</w>": 39938, "ashleigh</w>": 49356, "ashley": 17825, "ashley</w>": 8957, "asho": 20273, "ashok</w>": 38141, "ashore</w>": 31194, "ashram</w>": 43445, "ashton": 43264, "ashton</w>": 12228, "ashtra</w>": 18118, "asi": 3596, "asi</w>": 12562, "asia</w>": 5741, "asian": 21737, "asian</w>": 7128, "asiangames</w>": 49108, "asians</w>": 36771, "asics</w>": 31097, "aside</w>": 13676, "asif</w>": 37302, "asim</w>": 46050, "asin": 48432, "asin</w>": 44347, "asing</w>": 4194, "asingly</w>": 15803, "asion</w>": 31753, "asis</w>": 12398, "ask": 11027, "ask</w>": 2765, "asked</w>": 3993, "asking</w>": 5914, "asks</w>": 7953, "asl</w>": 41650, "asleep</w>": 10749, "asley</w>": 28206, "asli</w>": 44290, "asm</w>": 13851, 
"asma</w>": 38497, "asmsg</w>": 19839, "aso": 30343, "aso</w>": 27932, "asober": 43749, "asocial</w>": 48557, "ason</w>": 1163, "asone</w>": 31249, "asons</w>": 4249, "asos</w>": 37924, "asot</w>": 47968, "asp": 17814, "asp</w>": 36666, "asparag": 20301, "asparagus</w>": 20604, "aspe": 10894, "aspect</w>": 19681, "aspects</w>": 18203, "aspen": 35695, "aspen</w>": 25712, "asper": 32991, "asph": 28019, "asphalt</w>": 30574, "aspir": 12669, "aspirations</w>": 36127, "aspire</w>": 24836, "aspiring</w>": 21862, "asports</w>": 43695, "asr</w>": 48052, "asroma</w>": 41000, "ass": 12664, "ass</w>": 5301, "assa</w>": 47715, "assad</w>": 18699, "assam</w>": 19930, "assan": 26352, "assange</w>": 27565, "assas": 9603, "assassin": 14366, "assassin</w>": 20029, "assassinated</w>": 40488, "assassination</w>": 24907, "assassins</w>": 34918, "assassinscre": 36428, "assassinscreed</w>": 46082, "assau": 7908, "assaul": 19596, "assault</w>": 9679, "assaulted</w>": 30785, "assaulting</w>": 44143, "asse": 3166, "asse</w>": 38600, "assel</w>": 37582, "assemb": 5531, "assemble</w>": 26169, "assembled</w>": 22627, "assemblies</w>": 47406, "assembling</w>": 38670, "assembly": 34542, "assembly</w>": 7059, "assen</w>": 38651, "asser": 25665, "asses</w>": 21596, "assess": 9209, "assess</w>": 23211, "assessed</w>": 44160, "assessing</w>": 31364, "assessment</w>": 10590, "assessments</w>": 32753, "asset": 48463, "asset</w>": 13039, "assets</w>": 13170, "assi": 2907, "assi</w>": 39540, "assie</w>": 31624, "assign": 14190, "assigned</w>": 25767, "assignment</w>": 17342, "assignments</w>": 34257, "assim</w>": 36394, "assimil": 43467, "assist": 26558, "assist</w>": 10286, "assistance</w>": 11685, "assistant</w>": 6799, "assistants</w>": 31054, "assisted</w>": 18095, "assisting</w>": 24243, "assists</w>": 12675, "assn</w>": 44208, "asso</w>": 17617, "assoc</w>": 18891, "associ": 3566, "associate</w>": 11777, "associated</w>": 11164, "associates</w>": 17358, "association</w>": 5578, "associations</w>": 33209, "assor": 38604, "assorted</w>": 36701, "assortment</w>": 43112, "asst</w>": 24767, "assu": 8328, "assume</w>": 19294, "assumed</w>": 37661, "assuming</w>": 29422, "assump": 41182, "assumption</w>": 40773, "assumptions</w>": 45948, "assurance</w>": 28408, "assure</w>": 39161, "assured</w>": 25591, "assures</w>": 41988, "assy": 29940, "assy</w>": 12963, "ast": 1761, "ast</w>": 1242, "asta</w>": 43269, "aste": 25033, "aste</w>": 25579, "aster": 11013, "aster</w>": 9526, "asteroid</w>": 32253, "asters</w>": 33139, "asth": 16684, "asthma</w>": 24610, "asthour</w>": 41238, "astic</w>": 15876, "asting</w>": 29984, "astle</w>": 46141, "asto": 47275, "aston": 24760, "aston</w>": 13879, "astoni": 21962, "astonishing</w>": 27110, "astonmartin</w>": 40760, "astor": 26391, "astor</w>": 47086, "astoria</w>": 34798, "astounding</w>": 37748, "astr": 37609, "astra": 47205, "astra</w>": 36079, "astral</w>": 45889, "astri": 31243, "astrid</w>": 46499, "astro": 8563, "astro</w>": 15318, "astrology</w>": 28526, "astron": 7982, "astronaut</w>": 18376, "astronauts</w>": 29733, "astronom": 23264, "astronomer</w>": 40036, "astronomers</w>": 44268, "astronomical</w>": 39775, "astronomy</w>": 17472, "astrophotography</w>": 38559, "astros</w>": 17598, "asts</w>": 10452, "astu": 43137, "astur": 45795, "asu": 13157, "asu</w>": 16001, "asun": 36044, "asure</w>": 3813, "asus</w>": 27269, "aswell</w>": 42978, "asx</w>": 38906, "asy": 8524, "asy</w>": 2333, "asylum</w>": 15638, "asym": 32539, "at": 527, "at</w>": 536, "ata</w>": 4236, "atable</w>": 
23909, "atal": 24877, "atal</w>": 24797, "atan</w>": 33446, "atar": 20128, "atar</w>": 7995, "atari</w>": 21549, "atas</w>": 30057, "atay": 39518, "atc</w>": 28383, "atch</w>": 15938, "atd</w>": 33890, "ate": 992, "ate</w>": 671, "ateam</w>": 42784, "ateau</w>": 16359, "atec": 37352, "atech</w>": 31306, "ated": 14589, "ated</w>": 943, "atedly</w>": 24698, "atee</w>": 32839, "ateful</w>": 5419, "atelier</w>": 29932, "ately</w>": 3862, "atem": 17116, "aten</w>": 47984, "atene": 30405, "ateneo</w>": 33904, "ater": 18597, "ater</w>": 5877, "ateral</w>": 18819, "aters</w>": 22364, "ates": 20370, "ates</w>": 1150, "atest</w>": 1705, "ateur</w>": 43677, "atf</w>": 28013, "ath": 1374, "ath</w>": 1649, "atha</w>": 22530, "atham</w>": 23383, "athan": 41260, "athan</w>": 26701, "athe": 8963, "athed</w>": 47402, "atheism</w>": 25823, "atheist</w>": 22571, "atheists</w>": 47155, "athen": 29112, "athena</w>": 30705, "athens</w>": 13524, "ather": 6171, "ather</w>": 1817, "athered</w>": 34091, "athers</w>": 17266, "athi</w>": 28918, "athing</w>": 36069, "athle": 3310, "athlete</w>": 7388, "athletes</w>": 7125, "athletic": 33182, "athletic</w>": 9028, "athletics</w>": 7019, "athlon</w>": 14670, "athome</w>": 38217, "athon</w>": 4951, "aths</w>": 28835, "athy": 34488, "athy</w>": 13183, "ati": 591, "ati</w>": 6751, "atia</w>": 10908, "atic": 20248, "atic</w>": 2647, "atically</w>": 13558, "atics</w>": 15666, "atie</w>": 30137, "aties</w>": 40060, "atif</w>": 41592, "atiku</w>": 37912, "atile</w>": 15474, "atility</w>": 23373, "atime</w>": 20158, "atin": 36903, "atin</w>": 23047, "atine</w>": 39741, "ating": 25653, "ating</w>": 1074, "atio</w>": 35401, "ation": 2265, "ation</w>": 656, "ational": 14205, "ational</w>": 3108, "ationals</w>": 44593, "ationday</w>": 20082, "ations</w>": 986, "atis": 45456, "atis</w>": 41142, "atism</w>": 45638, "ative": 18422, "ative</w>": 1648, "atively</w>": 11929, "atives</w>": 5629, "ativity</w>": 25166, "atkins</w>": 27734, "atkinson</w>": 28908, "atl": 5411, "atl</w>": 10629, "atla": 36043, "atlan": 6818, "atlanta": 39964, "atlanta</w>": 6839, "atlantic": 28804, "atlantic</w>": 8189, "atlantis</w>": 27790, "atlas</w>": 15775, "atle": 21170, "atleast</w>": 33231, "atleti</w>": 46067, "atletico</w>": 27501, "atm</w>": 14127, "atmo": 8271, "atmosphere</w>": 10506, "atmospheric</w>": 24223, "ato": 7987, "ato</w>": 4364, "atoday</w>": 26799, "atom": 22418, "atom</w>": 24031, "atomic</w>": 18996, "atoms</w>": 41434, "aton": 31525, "aton</w>": 10012, "atop</w>": 17455, "ator": 10748, "ator</w>": 1962, "atore</w>": 28314, "atorial</w>": 32040, "atories</w>": 35678, "atorium</w>": 41306, "ators</w>": 3389, "atory</w>": 5920, "atos</w>": 41643, "atour</w>": 42967, "atown</w>": 24000, "atp": 38105, "atp</w>": 19817, "atr</w>": 43247, "atra": 20227, "atra</w>": 14401, "atravel</w>": 36981, "atre": 46057, "atri": 13882, "atri</w>": 38889, "atric": 32238, "atric</w>": 13652, "atrics</w>": 36253, "atrist</w>": 41879, "atrium</w>": 29725, "atrix</w>": 43003, "atro": 18724, "atroc": 36197, "atrocities</w>": 37551, "atry</w>": 28334, "ats": 46890, "ats</w>": 1032, "atsu</w>": 26531, "att": 1017, "att</w>": 7103, "atta": 7282, "atta</w>": 9146, "attach": 43676, "attach</w>": 35653, "attached</w>": 11038, "attachment</w>": 28638, "attack": 24971, "attack</w>": 3815, "attacked</w>": 12366, "attacker</w>": 39288, "attackers</w>": 47701, "attacking</w>": 16813, "attacks</w>": 7321, "attain</w>": 46459, "attar</w>": 37110, "attemp": 4933, "attempt</w>": 7409, "attempted</w>": 17408, 
"attempting</w>": 18195, "attempts</w>": 15610, "atten": 4084, "atten</w>": 32408, "attenborough</w>": 45860, "attend": 9841, "attend</w>": 5802, "attendance</w>": 11928, "attendant</w>": 35424, "attended</w>": 8140, "attendees</w>": 14648, "attending</w>": 6696, "attends</w>": 22248, "attention</w>": 4936, "atters</w>": 30675, "atthe": 21489, "atti": 49265, "atti</w>": 16235, "attic</w>": 26766, "attire</w>": 21222, "attitude</w>": 10648, "attitudes</w>": 27611, "attle": 14685, "attle</w>": 5030, "attn</w>": 25677, "attor": 8856, "attorney</w>": 10372, "attorneys</w>": 29113, "attrac": 7154, "attract</w>": 17010, "attracted</w>": 28493, "attracting</w>": 31909, "attraction</w>": 16807, "attractions</w>": 22307, "attractive</w>": 12231, "attracts</w>": 31024, "attribu": 24624, "attributed</w>": 37520, "attributes</w>": 40763, "attu</w>": 43173, "atty</w>": 36705, "atu": 15191, "atu</w>": 24295, "atuesday</w>": 34841, "atul": 1744, "atul</w>": 43948, "atum</w>": 48295, "atur": 14986, "aturday</w>": 29027, "ature": 25305, "ature</w>": 4490, "atures</w>": 7358, "atus</w>": 14795, "atv</w>": 19598, "atwood</w>": 45680, "atwork</w>": 39680, "atx": 34849, "atx</w>": 20136, "aty": 40974, "aty</w>": 33107, "atz</w>": 30432, "au": 627, "au</w>": 2566, "aua</w>": 45906, "aub</w>": 45938, "auberg": 49382, "aubre": 25899, "aubrey</w>": 34110, "auburn": 42269, "auburn</w>": 14534, "auc": 24489, "auch</w>": 43024, "auck": 14588, "auckland</w>": 16072, "auction": 48160, "auction</w>": 6462, "auctioned</w>": 41073, "auctions</w>": 24876, "aucus</w>": 47374, "aud": 16107, "aud</w>": 19711, "audi": 5091, "audi</w>": 10277, "audible</w>": 33227, "audience</w>": 6863, "audiences</w>": 22328, "audio": 13792, "audio</w>": 5766, "audiobook</w>": 26282, "audit": 12505, "audit</w>": 17625, "auditi": 37377, "audition</w>": 18673, "auditions</w>": 21134, "auditor</w>": 38050, "auditorium</w>": 15063, "audre": 16075, "audrey</w>": 18812, "audu": 27934, "audubon</w>": 40275, "auer</w>": 33460, "auf</w>": 28924, "aug": 15397, "aug</w>": 5720, "auga</w>": 22797, "augh": 28310, "augh</w>": 14005, "augmente": 48356, "augmented</w>": 32708, "augu": 2610, "august": 24353, "august</w>": 3171, "augusta</w>": 26144, "augustine</w>": 27397, "augustus</w>": 36835, "auk</w>": 19058, "aul": 20695, "aul</w>": 34391, "ault": 47253, "ault</w>": 10219, "aun": 10608, "aun</w>": 38721, "aunt</w>": 12685, "auntie</w>": 23783, "aunty</w>": 29528, "aur": 8156, "aur</w>": 17282, "aura</w>": 27728, "aure": 36010, "aureli": 35980, "auror": 30067, "aurora</w>": 13500, "aus": 10624, "aus</w>": 7630, "ausa</w>": 37384, "ausbiz</w>": 46543, "ausch": 33926, "auschwitz</w>": 36523, "ausopen</w>": 27831, "ausp": 35039, "auspicious</w>": 38806, "auspol</w>": 8241, "aussi": 19762, "aussie": 40230, "aussie</w>": 14424, "aussies</w>": 35727, "aust": 26301, "aust</w>": 25418, "austen</w>": 29885, "auster": 25030, "austerity</w>": 26982, "austin": 12845, "austin</w>": 5125, "austinmahone</w>": 34678, "austr": 2518, "australi": 13798, "australia</w>": 3444, "australian": 23630, "australian</w>": 6258, "australians</w>": 31488, "austri": 8946, "austria</w>": 11960, "austrian</w>": 20638, "ausv": 35206, "ausvotes</w>": 34661, "aut</w>": 12343, "auth": 2381, "auth</w>": 38247, "authent": 18158, "authentic": 41266, "authentic</w>": 10369, "authentication</w>": 39746, "authenticity</w>": 35734, "autho": 34552, "author": 14447, "author</w>": 4358, "authored</w>": 37928, "authori": 19207, "authorities</w>": 12729, "authority</w>": 10524, "authorization</w>": 
48854, "authorized</w>": 28463, "authors</w>": 10765, "auti": 8200, "autism": 36256, "autism</w>": 11244, "autisma": 43324, "autistic</w>": 29360, "auto": 3917, "auto</w>": 5668, "autobiography</w>": 31509, "autodesk</w>": 40415, "autograph": 10657, "autograph</w>": 13722, "autographed</w>": 16309, "autographs</w>": 17376, "autoimmune</w>": 45509, "autom": 4114, "automate</w>": 43203, "automated</w>": 19022, "automatic</w>": 12126, "automatically</w>": 20725, "automation</w>": 12328, "automobi": 44813, "automobile</w>": 25258, "automotive</w>": 12607, "auton": 13100, "autonews</w>": 43975, "autonom": 17870, "autonomous</w>": 20722, "autonomy</w>": 39223, "autopsy</w>": 44436, "autos</w>": 31118, "autoshow</w>": 46788, "auts</w>": 21140, "autu": 5445, "autum": 31783, "autumn": 28940, "autumn</w>": 6110, "autumnal</w>": 35481, "aux": 18154, "aux</w>": 8909, "auxiliary</w>": 37778, "av": 722, "av</w>": 8484, "ava</w>": 12385, "avage</w>": 31505, "avail": 1651, "avail</w>": 16686, "availability</w>": 17551, "available</w>": 1685, "aval": 18012, "avalan": 23970, "avalanche</w>": 25815, "avalley</w>": 45082, "avalon</w>": 30436, "avan": 27971, "avan</w>": 33351, "avant</w>": 24305, "avar": 33423, "avatar</w>": 18219, "ave": 10062, "ave</w>": 4860, "avec</w>": 25828, "aved</w>": 47918, "avel": 46817, "avel</w>": 48088, "aven": 5963, "aven</w>": 32971, "aveng": 21935, "avenger</w>": 24799, "avengers": 39413, "avengers</w>": 12016, "avengersendgame</w>": 49342, "avent": 22700, "avenue</w>": 7042, "aver": 8788, "aver</w>": 11403, "average</w>": 6254, "averaged</w>": 37310, "averages</w>": 48982, "averaging</w>": 35266, "avery</w>": 20313, "aves</w>": 14023, "avfc</w>": 21304, "avg</w>": 19452, "avgeek</w>": 11114, "avi": 3324, "avi</w>": 11297, "avia</w>": 38710, "avian</w>": 24115, "aviation": 27717, "aviation</w>": 7617, "aviator</w>": 38921, "aviators</w>": 48011, "avici": 46192, "avicii</w>": 49158, "avid</w>": 19118, "avier</w>": 14598, "avila</w>": 45339, "aville</w>": 40689, "avin": 46204, "avis": 45163, "avis</w>": 19765, "aviv</w>": 22130, "aviva</w>": 47122, "aviz</w>": 27607, "avl": 44749, "avo": 4496, "avo</w>": 32400, "avoc": 12291, "avocado</w>": 14135, "avocados</w>": 48911, "avoi": 16797, "avoid": 30448, "avoid</w>": 5983, "avoidance</w>": 47983, "avoided</w>": 32103, "avoiding</w>": 22086, "avoids</w>": 48220, "avon": 22790, "avon</w>": 17348, "avril</w>": 37763, "avs</w>": 31896, "avut</w>": 44472, "avy</w>": 29973, "aw": 808, "aw</w>": 5557, "awa": 4820, "awa</w>": 6872, "await</w>": 20769, "awaited</w>": 20092, "awaiting</w>": 14872, "awaits</w>": 15635, "awak": 9776, "awak</w>": 41387, "awake</w>": 14695, "awaken</w>": 35412, "awakening</w>": 17017, "awakens</w>": 23191, "awal": 42447, "awal</w>": 35090, "awan": 48869, "awan</w>": 20420, "awar": 5745, "award": 36310, "award</w>": 2047, "awarded</w>": 7368, "awarding</w>": 37089, "awards": 34528, "awards</w>": 2320, "aware": 4427, "aware</w>": 7196, "awareness": 19217, "awareness</w>": 4823, "awarenessmonth</w>": 34278, "awarenessweek</w>": 35294, "away": 21088, "away</w>": 1520, "aways</w>": 12782, "awaz</w>": 18586, "awd</w>": 34846, "awe": 1693, "awe</w>": 14106, "aweather": 42142, "aweather</w>": 28681, "awec</w>": 38916, "aweed</w>": 29724, "awesom": 16727, "awesome": 30390, "awesome</w>": 1848, "awesomeness</w>": 22430, "awful</w>": 13617, "awg</w>": 46350, "awgs</w>": 35275, "awh</w>": 39566, "awhile</w>": 19171, "awi</w>": 15167, "awil": 47271, "awilliams</w>": 42163, "awk": 8888, "awk</w>": 40943, "awkward": 42337, 
"awkward</w>": 10304, "awn</w>": 46222, "awp</w>": 43300, "aws</w>": 19658, "awsome</w>": 47196, "awson</w>": 36286, "aww</w>": 11568, "awww</w>": 15634, "awwww</w>": 26460, "awx</w>": 28385, "ax": 3165, "ax</w>": 9203, "axe</w>": 19861, "axel": 47889, "axel</w>": 32131, "axes</w>": 45970, "axi": 30672, "axial</w>": 46550, "axis</w>": 19614, "axle</w>": 39003, "axx": 47411, "ay": 658, "ay</w>": 551, "aya</w>": 5917, "ayala</w>": 39827, "ayama</w>": 41194, "ayan": 37781, "ayan</w>": 16269, "ayana</w>": 37400, "ayas</w>": 40904, "ayat": 44902, "ayat</w>": 35720, "aye": 21661, "aye</w>": 12446, "ayer</w>": 24852, "ayers</w>": 42783, "ayesha</w>": 46570, "ayi</w>": 33025, "ayles": 44706, "ayne</w>": 35669, "ayo": 21929, "ayo</w>": 18708, "ayr": 23002, "ayr</w>": 36473, "ayrshire</w>": 32687, "ays</w>": 785, "ayu</w>": 40769, "ayurve": 27185, "ayurveda</w>": 38986, "ayush": 44831, "ayy</w>": 32514, "ayyy</w>": 41052, "az": 854, "az</w>": 5468, "aza</w>": 22883, "azad</w>": 37838, "azalea</w>": 34087, "azam</w>": 34727, "azar</w>": 27911, "azcardinals</w>": 48846, "aze": 41157, "aze</w>": 28485, "azer": 19169, "azerbai": 20649, "azerbaijan</w>": 23888, "azhar</w>": 47019, "azi": 23914, "azi</w>": 18452, "azine</w>": 29140, "azione</w>": 48335, "aziz": 41205, "aziz</w>": 29630, "azo</w>": 41227, "azon</w>": 36854, "azores</w>": 42826, "azte": 33270, "aztec</w>": 34749, "aztecs</w>": 49387, "azu": 27701, "azu</w>": 46963, "azul</w>": 39807, "azure</w>": 18514, "azwx</w>": 30262, "azy</w>": 24783, "azz": 9817, "azz</w>": 26453, "azza</w>": 22255, "azzi</w>": 18758, "azzle</w>": 39974, "azzo</w>": 26779, "azzur": 37055, "azzy</w>": 44534, "añ": 23716, "años</w>": 41634, "b": 65, "b</w>": 321, "ba": 932, "ba</w>": 1792, "baa</w>": 33004, "baahu": 34145, "baahubali</w>": 38663, "bab": 1202, "bab</w>": 19039, "baba</w>": 12631, "babe": 31177, "babe</w>": 7716, "babes</w>": 14253, "babies</w>": 6635, "babs</w>": 36217, "babu</w>": 21623, "baby": 7268, "baby</w>": 1794, "babygirl</w>": 39554, "babylon</w>": 31928, "babymetal</w>": 45013, "babys": 22266, "babysitting</w>": 34186, "bac": 2791, "bac</w>": 25867, "bacca</w>": 40708, "bach": 11773, "bach</w>": 8758, "bachchan</w>": 17690, "bachel": 11283, "bachelor": 45508, "bachelor</w>": 16766, "bachelore": 26009, "bachelorette</w>": 29093, "bacher</w>": 49211, "back": 1663, "back</w>": 893, "backbone</w>": 35635, "backdrop</w>": 20802, "backed</w>": 12721, "backer</w>": 22183, "backers</w>": 32934, "background</w>": 5994, "backgrounds</w>": 28215, "backing</w>": 14935, "backlash</w>": 31519, "backpack</w>": 14894, "backpacking</w>": 29524, "backpacks</w>": 37063, "backs</w>": 7562, "backseat</w>": 48812, "backstage</w>": 9236, "backstreet": 46337, "backthe": 26127, "backto": 18703, "backtoschool</w>": 28730, "backtothe": 43059, "backup</w>": 14415, "backward</w>": 37964, "backwards</w>": 21283, "backyard</w>": 12608, "bacon": 48666, "bacon</w>": 7104, "bacter": 11814, "bacteria</w>": 16556, "bacterial</w>": 26101, "bad": 2564, "bad</w>": 2103, "bada</w>": 37475, "badan</w>": 39149, "badass</w>": 11616, "baddest</w>": 38112, "baden</w>": 36690, "bader</w>": 42254, "badge</w>": 11301, "badger": 32686, "badger</w>": 22363, "badgers</w>": 22521, "badges</w>": 20084, "badlands</w>": 43192, "badly</w>": 13684, "badminton</w>": 21412, "badoo</w>": 33192, "bados</w>": 25755, "bae": 32834, "bae</w>": 6855, "baek": 18557, "baek</w>": 32702, "baekhyun</w>": 21572, "baes</w>": 46332, "baf": 13616, "baff": 35693, "bafta</w>": 29199, "bag": 3408, "bag</w>": 3365, 
"bage</w>": 9698, "bagel</w>": 28777, "bagels</w>": 37489, "baggage</w>": 31402, "bagged</w>": 34047, "bagh": 21659, "bagh</w>": 37271, "baghdad</w>": 30763, "bago</w>": 25105, "bags</w>": 6136, "bagu": 27749, "baguette</w>": 45334, "bah": 8372, "bah</w>": 16685, "baha": 29592, "baham": 43718, "bahamas</w>": 21224, "bahan</w>": 28704, "bahn</w>": 33452, "bahrain</w>": 12503, "bai": 6232, "bai</w>": 23339, "bail": 22933, "bail</w>": 16986, "bailey": 27535, "bailey</w>": 10180, "bain": 40784, "bain</w>": 21593, "bair": 29059, "baird</w>": 40474, "bait</w>": 18010, "baj": 20713, "baja": 40418, "baja</w>": 28374, "bajo</w>": 32619, "bak": 4059, "bak</w>": 23742, "bakar</w>": 41414, "bake": 20736, "bake</w>": 11878, "baked</w>": 10364, "baker": 27303, "baker</w>": 7743, "bakers": 35293, "bakers</w>": 40231, "bakersfield</w>": 40149, "bakery</w>": 13377, "bakes</w>": 43057, "bakhta": 44912, "bakhtawar": 46937, "bakhtawarbz</w>": 47118, "baking</w>": 11467, "baku": 46417, "baku</w>": 31852, "bal": 1398, "bal</w>": 2282, "bala</w>": 20291, "balaji</w>": 48694, "balance": 42894, "balance</w>": 6827, "balanced</w>": 15273, "balances</w>": 37733, "balancing</w>": 23541, "balboa</w>": 45098, "balcony</w>": 16169, "bald": 11153, "bald</w>": 14875, "baldhead</w>": 29191, "baldwin</w>": 16242, "bale": 48573, "bale</w>": 18873, "bales</w>": 42879, "bali": 16432, "bali</w>": 10900, "balkan</w>": 48499, "balkans</w>": 42987, "ball": 3807, "ball</w>": 1069, "balla": 42246, "ballad</w>": 33472, "ballarat</w>": 46645, "ballard</w>": 31750, "baller": 49194, "baller</w>": 25655, "ballerina</w>": 34962, "ballers</w>": 34173, "ballet</w>": 10703, "balli": 29406, "ballin": 47444, "ballin</w>": 33057, "balling</w>": 47588, "ballis": 46675, "ballistic</w>": 36667, "ballo": 8871, "ballon</w>": 36469, "balloon</w>": 13634, "balloons</w>": 18130, "ballot</w>": 14185, "ballots</w>": 35051, "ballpark</w>": 26080, "ballroom</w>": 15493, "balls</w>": 6927, "bally": 17275, "bally</w>": 29451, "balm</w>": 24962, "balmain</w>": 45929, "balo": 12395, "baloch</w>": 23173, "balochistan</w>": 21918, "balot": 44615, "balotelli</w>": 45721, "bals</w>": 44154, "balsam": 29121, "balsamic</w>": 32654, "balt</w>": 24441, "balti": 8400, "baltic</w>": 23817, "baltimore": 38502, "baltimore</w>": 9582, "balu": 38093, "bam": 6383, "bam</w>": 12686, "bama</w>": 20021, "bambam</w>": 34538, "bambi</w>": 46596, "bamboo": 49322, "bamboo</w>": 16748, "ban": 1159, "ban</w>": 2777, "bana</w>": 18428, "banan": 38410, "banana</w>": 8922, "bananas</w>": 19121, "banc": 39252, "band": 4613, "band</w>": 1963, "banda</w>": 31865, "bandai</w>": 42054, "bandana</w>": 39265, "bandcamp</w>": 32229, "banded</w>": 37804, "bandic": 44400, "bandit</w>": 27639, "bandits</w>": 33940, "bandra</w>": 41393, "bands</w>": 7858, "bandung</w>": 29512, "bandwagon</w>": 36432, "bandwidth</w>": 48859, "bane</w>": 9597, "banerjee</w>": 48102, "banff</w>": 29565, "bang": 3524, "bang</w>": 6907, "bangalore</w>": 14697, "banger</w>": 24872, "bangers</w>": 38311, "banging</w>": 33033, "bangkok</w>": 12351, "bangla": 10339, "bangla</w>": 45928, "bangladesh</w>": 11245, "bangle</w>": 37634, "bangor</w>": 31190, "bangs</w>": 27992, "bangtan</w>": 39131, "bani</w>": 19732, "banjo</w>": 27014, "bank": 7061, "bank</w>": 2723, "banker</w>": 27316, "bankers</w>": 30599, "bankholiday": 48868, "banking</w>": 9566, "bankno": 49201, "bankof": 39120, "bankrup": 21904, "bankrupt": 23077, "bankrupt</w>": 37288, "bankruptcy</w>": 23978, "banks</w>": 6367, "banksy</w>": 33350, "bann": 5304, 
"banned</w>": 12012, "banner</w>": 9185, "banners</w>": 23145, "banning</w>": 26246, "bannon</w>": 29710, "bano</w>": 42947, "banquet</w>": 14254, "bans</w>": 15146, "bant": 23301, "bant</w>": 46657, "banter</w>": 25535, "bao": 39487, "bao</w>": 20408, "bap": 7415, "bap</w>": 23754, "bapti": 15477, "baptism</w>": 36765, "baptist</w>": 13274, "baptiste</w>": 45770, "baptized</w>": 45400, "bar": 1040, "bar</w>": 2411, "bara</w>": 19345, "barack": 18670, "barack</w>": 22481, "barackobama</w>": 18885, "barak": 47419, "barak</w>": 16260, "barang": 38446, "barb": 24173, "barb</w>": 20913, "barbados</w>": 26992, "barbar": 7906, "barbara</w>": 10937, "barbarian</w>": 42530, "barbe": 18372, "barbecue</w>": 23501, "barber": 19517, "barber</w>": 12296, "barbershop</w>": 37707, "barbican</w>": 47668, "barbie</w>": 16923, "barca</w>": 22942, "barcel": 6134, "barcelon": 47820, "barcelona</w>": 6412, "barclay": 48877, "barclay</w>": 45276, "barclays</w>": 29538, "bard": 39812, "bard</w>": 17514, "bare": 16023, "bare</w>": 14318, "barefoot</w>": 30327, "barely</w>": 12684, "bargain</w>": 15076, "bargaining</w>": 41282, "bargains</w>": 34126, "barge</w>": 28272, "bari": 21428, "bari</w>": 28016, "barista</w>": 31078, "barit": 46300, "bark": 32333, "bark</w>": 16560, "barker</w>": 20618, "barking</w>": 32676, "barkley</w>": 30266, "barley</w>": 22607, "barlow</w>": 25483, "barn": 10490, "barn</w>": 10942, "barnab": 43272, "barnard</w>": 44332, "barne": 42527, "barnes</w>": 13102, "barnet</w>": 41943, "barnett</w>": 27650, "barney</w>": 24563, "barns</w>": 43759, "barnsley": 37109, "barnsley</w>": 32153, "baro": 17422, "baro</w>": 30817, "baron": 48371, "baron</w>": 19349, "baroness</w>": 45056, "barons</w>": 45596, "baroque</w>": 25065, "barr": 39473, "barr</w>": 22492, "barra": 28442, "barra</w>": 33542, "barrabest</w>": 41376, "barrac": 40835, "barracks</w>": 35822, "barre": 13840, "barre</w>": 38257, "barred</w>": 33261, "barrel</w>": 11703, "barrels</w>": 22059, "barren</w>": 46743, "barrett</w>": 18701, "barri": 8660, "barric": 29189, "barrie</w>": 27090, "barrier</w>": 15706, "barriers</w>": 16321, "barrington</w>": 48954, "barron</w>": 34881, "barrow": 42568, "barrow</w>": 24983, "barry": 18028, "barry</w>": 8461, "barrymore</w>": 49310, "bars</w>": 8616, "barstool": 44826, "bart": 14838, "bart</w>": 12870, "bartender</w>": 33498, "barthol": 48989, "bartlett</w>": 37130, "bartol": 38209, "barton": 48853, "barton</w>": 20345, "baru</w>": 16356, "barun": 38278, "barunsob": 41398, "barça</w>": 32788, "bas": 1244, "bas</w>": 11420, "basa</w>": 26142, "base": 2776, "base</w>": 4579, "baseball": 23479, "baseball</w>": 3470, "based": 35196, "based</w>": 2812, "basel": 42803, "basel</w>": 20903, "baseline</w>": 40648, "baseman</w>": 45910, "basement</w>": 14792, "bases</w>": 20496, "bash": 20462, "bash</w>": 10972, "bashing</w>": 37545, "bashir</w>": 42799, "basic": 40452, "basic</w>": 7696, "basically</w>": 9125, "basics</w>": 15825, "basil": 19225, "basil</w>": 14936, "basilica</w>": 27879, "basin</w>": 16117, "basing": 47321, "basis</w>": 12278, "baske": 3713, "basket</w>": 10338, "basketball": 40023, "basketball</w>": 3835, "baskets</w>": 27787, "basking</w>": 39769, "basque</w>": 37175, "bass": 22831, "bass</w>": 5992, "bassett</w>": 45992, "bassist</w>": 26496, "bast": 28092, "basti": 8559, "bastille</w>": 41874, "bat": 2121, "bat</w>": 6575, "bata</w>": 39277, "batb</w>": 33962, "batch</w>": 9413, "bate": 25034, "bate</w>": 28277, "bateman</w>": 41635, "bates</w>": 21727, "batgirl</w>": 46460, "bath": 
6064, "bath</w>": 5713, "bathing</w>": 20144, "bathro": 21201, "bathroom</w>": 8470, "bathrooms</w>": 26434, "baths</w>": 19442, "bathtub</w>": 39942, "bathurst</w>": 36365, "bati": 23362, "bati</w>": 37589, "batman": 27811, "batman</w>": 7223, "baton</w>": 24331, "bats</w>": 14984, "batsman</w>": 35432, "batt": 2407, "batt</w>": 48595, "battalion</w>": 20820, "batter": 12654, "batter</w>": 31855, "battered</w>": 34375, "batteries</w>": 16666, "battersea</w>": 35839, "battery</w>": 7870, "batting</w>": 17401, "battle": 7344, "battle</w>": 3528, "battled</w>": 37837, "battlefield</w>": 16055, "battlefront</w>": 42214, "battleof": 47560, "battles</w>": 14213, "battleship</w>": 35165, "battling</w>": 17268, "bau": 6055, "bau</w>": 34840, "bauer</w>": 22903, "baugh</w>": 41301, "baum</w>": 19840, "bautista</w>": 31881, "bav": 21075, "bavaria</w>": 39977, "bavarian</w>": 44458, "baw": 19808, "bax": 21216, "baxter</w>": 26168, "bay": 3631, "bay</w>": 2174, "baya</w>": 31573, "bayan</w>": 43895, "bayarea</w>": 28260, "bayer": 48548, "bayer</w>": 29183, "bayern</w>": 14666, "baylor</w>": 21721, "bayou</w>": 33955, "bays</w>": 40156, "baz": 10430, "baz</w>": 25268, "bazaar</w>": 20070, "bazar</w>": 49298, "bb": 1174, "bb</w>": 3529, "bba</w>": 27762, "bball</w>": 15664, "bbb</w>": 33535, "bbc": 5123, "bbc</w>": 5188, "bbcc": 39052, "bbce": 33818, "bbcnews</w>": 29370, "bbcone</w>": 28259, "bbcqt</w>": 37343, "bbcr</w>": 35802, "bbcra": 17115, "bbcradi": 49213, "bbcradio</w>": 22876, "bbcsport</w>": 49321, "bbcspringwatch</w>": 37358, "bbctwo</w>": 40395, "bbcworld</w>": 47340, "bbe</w>": 37559, "bbed</w>": 9077, "bber</w>": 7933, "bbers</w>": 36494, "bbhutto": 28085, "bbhuttozardari</w>": 28135, "bbi</w>": 37047, "bbin</w>": 38553, "bbing</w>": 9787, "bbins</w>": 42504, "bbl</w>": 21961, "bble": 26570, "bble</w>": 5924, "bbled</w>": 37626, "bbles</w>": 18093, "bblo": 21231, "bbloggers</w>": 26614, "bbly</w>": 43031, "bbm</w>": 25382, "bbmas</w>": 22145, "bbn</w>": 28427, "bbnaija</w>": 20984, "bbo</w>": 21892, "bbq": 41270, "bbq</w>": 6726, "bbs</w>": 10002, "bbuk</w>": 45978, "bby": 11166, "bby</w>": 3810, "bc": 3116, "bc</w>": 2162, "bcc</w>": 41509, "bcci</w>": 36138, "bce</w>": 36510, "bcfc</w>": 34359, "bch</w>": 36684, "bcn</w>": 25766, "bcoz</w>": 46373, "bcpoli</w>": 24389, "bcs</w>": 24909, "bcu</w>": 28299, "bd": 24358, "bd</w>": 11165, "bday": 33022, "bday</w>": 5781, "bdg</w>": 48418, "bds</w>": 26732, "be": 571, "be</w>": 655, "bea": 21886, "bea</w>": 20925, "beach": 6068, "beach</w>": 2117, "beaches</w>": 12183, "beachlife</w>": 43824, "beacon": 36883, "beacon</w>": 18858, "beacons</w>": 39395, "bead": 31621, "bead</w>": 23557, "beaded</w>": 26661, "beads</w>": 14099, "beagle</w>": 30044, "beak</w>": 36498, "beal</w>": 45769, "beale</w>": 39717, "beam": 35339, "beam</w>": 13663, "beams</w>": 23993, "bean": 16471, "bean</w>": 5328, "beanie</w>": 21534, "beans</w>": 8302, "bear": 6375, "bear</w>": 4298, "bearable</w>": 38608, "bearcats</w>": 33242, "beard": 26157, "beard</w>": 9052, "bearded</w>": 28459, "beardown</w>": 43687, "beards</w>": 33020, "bearer</w>": 30686, "bearers</w>": 47986, "bearing</w>": 18370, "bearings</w>": 42083, "bearish</w>": 34829, "bears</w>": 6182, "beasley</w>": 43349, "beast": 20847, "beast</w>": 6957, "beastmode</w>": 43076, "beasts</w>": 21771, "beat": 3774, "beat</w>": 3018, "beaten</w>": 10864, "beater</w>": 41974, "beati": 44386, "beating</w>": 10078, "beatles</w>": 11961, "beatport</w>": 31421, "beatrice</w>": 36922, "beats</w>": 6289, "beatthe": 40550, 
"beatty</w>": 39903, "beatz</w>": 33363, "beau": 1016, "beau</w>": 14298, "beaufort</w>": 45423, "beaumont</w>": 32857, "beaut</w>": 24559, "beauti": 1154, "beauties</w>": 14874, "beautiful": 13662, "beautiful</w>": 1215, "beautifully</w>": 10627, "beauty": 12881, "beauty</w>": 2488, "beav": 23260, "beaver": 26432, "beaver</w>": 22874, "beavers</w>": 34513, "beavs</w>": 43909, "bebe</w>": 23331, "bec": 6899, "bec</w>": 10773, "became</w>": 5464, "because": 32714, "because</w>": 1631, "becca</w>": 27088, "bech": 44055, "beck": 8256, "beck</w>": 10396, "becker</w>": 26918, "beckett</w>": 27249, "beckham</w>": 18764, "becky": 32406, "becky</w>": 18921, "become</w>": 2989, "becomes</w>": 6766, "becoming</w>": 6208, "bed": 4152, "bed</w>": 2722, "bedding</w>": 31761, "bedford</w>": 20779, "bedi</w>": 39181, "bedro": 18415, "bedroom</w>": 8411, "bedrooms</w>": 23996, "beds</w>": 13914, "bedside</w>": 47473, "bedtime</w>": 22115, "bee": 6097, "bee</w>": 5028, "beech": 32733, "beech</w>": 27596, "beef": 21703, "beef</w>": 6529, "beek</w>": 37915, "been": 33986, "been</w>": 1025, "beep</w>": 33432, "beer": 8885, "beer</w>": 2544, "beers</w>": 10907, "bees": 36249, "bees</w>": 9100, "beet": 12582, "beet</w>": 28621, "beethoven</w>": 23656, "beetle</w>": 16534, "beetles</w>": 36317, "beetro": 29251, "beetroot</w>": 31638, "beets</w>": 36087, "before": 20898, "before</w>": 1348, "beg": 2219, "beg</w>": 22401, "began</w>": 8636, "begg": 36769, "begging</w>": 25371, "begin": 19197, "begin</w>": 4947, "beginner</w>": 24351, "beginners</w>": 21930, "beginning</w>": 5791, "beginnings</w>": 22581, "begins</w>": 4635, "begs</w>": 43531, "begun</w>": 10514, "beh": 21971, "beh</w>": 41612, "beha": 5737, "behalf</w>": 11470, "behave</w>": 28825, "behaved</w>": 41617, "behavi": 6149, "behaving</w>": 40745, "behavior</w>": 10461, "behavioral</w>": 25135, "behaviors</w>": 37741, "behaviour</w>": 14655, "behavioural</w>": 46019, "behe": 42329, "behin": 2335, "behind</w>": 2403, "behindthe": 21104, "behindthescenes</w>": 26253, "behold</w>": 15929, "bei": 38991, "bei</w>": 23227, "beige</w>": 26677, "beij": 11547, "beijing</w>": 11796, "bein": 39117, "bein</w>": 24168, "being": 13481, "being</w>": 1265, "beings</w>": 17998, "beingsalmankhan</w>": 19637, "beir": 20176, "beirut</w>": 22352, "beit</w>": 26963, "bek": 46846, "bek</w>": 26135, "bekind</w>": 46691, "bel": 1308, "bel</w>": 3543, "bela</w>": 30555, "belarus</w>": 30849, "belated</w>": 20256, "belfast": 35100, "belfast</w>": 10015, "belgi": 7001, "belgian</w>": 15008, "belgium</w>": 10239, "belgrade</w>": 30502, "beli": 1859, "beli</w>": 45842, "belichick</w>": 46132, "belie": 20854, "beliebers</w>": 27714, "belief</w>": 14802, "beliefs</w>": 20575, "believ": 4972, "believe": 15819, "believe</w>": 2649, "believed</w>": 13380, "believein": 24294, "believeinfilm</w>": 37375, "believer</w>": 26057, "believers</w>": 28434, "believes</w>": 12017, "believing</w>": 19551, "belinda</w>": 44415, "belize</w>": 27990, "bell": 5417, "bell</w>": 3718, "bella": 18282, "bella</w>": 10418, "bellamy</w>": 34461, "bellator</w>": 31985, "belle": 13587, "belle</w>": 11496, "belles</w>": 40678, "bellevue</w>": 32715, "belli": 43335, "bellletstalk</w>": 42695, "bello</w>": 21954, "bells</w>": 12811, "bellum</w>": 35493, "belly": 25901, "belly</w>": 10404, "belmont</w>": 25612, "belo": 8379, "belo</w>": 41649, "belong": 16453, "belong</w>": 13596, "belonged</w>": 39893, "belonging</w>": 28193, "belongs</w>": 14395, "beloved</w>": 9363, "below</w>": 3788, "bels</w>": 43127, "belt": 
36416, "belt</w>": 7373, "belts</w>": 21888, "belvedere</w>": 48003, "ben": 1465, "ben</w>": 3518, "bena</w>": 46249, "bench": 17770, "bench</w>": 8771, "benches</w>": 36349, "benchmark</w>": 31775, "bend": 22100, "bend</w>": 13332, "bender</w>": 22551, "bendigo</w>": 48197, "bending</w>": 33897, "bene": 12091, "bene</w>": 47151, "beneath</w>": 16850, "bened": 13216, "benedic": 24402, "benedict": 47896, "benedict</w>": 18027, "benef": 3260, "benefici": 38593, "beneficial</w>": 24660, "beneficiaries</w>": 42160, "benefit</w>": 6399, "benefited</w>": 48266, "benefiting</w>": 29474, "benefits</w>": 5465, "benefitting</w>": 47222, "benevol": 47060, "benfica</w>": 33873, "beng": 6962, "bengal": 17404, "bengal</w>": 16374, "bengali</w>": 33774, "bengals</w>": 23737, "bengaluru</w>": 21707, "benghazi</w>": 25967, "benin</w>": 40296, "benitez</w>": 46711, "benjam": 10550, "benjamin": 38647, "benjamin</w>": 12131, "benji</w>": 43548, "benn</w>": 39097, "bennet</w>": 48536, "bennett</w>": 12186, "benny": 42369, "benny</w>": 20595, "beno": 35268, "benoit</w>": 44373, "benson</w>": 19578, "bent": 9809, "bent</w>": 18369, "bentley</w>": 16859, "benton</w>": 30812, "benz": 27937, "benz</w>": 13470, "ber": 867, "ber</w>": 1516, "bera</w>": 32802, "bere": 17458, "bered</w>": 9193, "beren": 33654, "beret</w>": 41658, "berg": 12022, "berg</w>": 3294, "bergen</w>": 22918, "berger": 35933, "berger</w>": 13873, "bergh</w>": 35120, "bergman</w>": 42597, "bergs</w>": 43592, "berk": 15633, "berke": 14639, "berkeley": 46049, "berkeley</w>": 16667, "berkshire</w>": 27300, "berlin": 23532, "berlin</w>": 5891, "berman</w>": 21514, "bermu": 21032, "bermuda</w>": 24644, "bern": 9195, "bern</w>": 18382, "bernade": 46242, "bernar": 11962, "bernard</w>": 14579, "bernardino</w>": 35328, "bernardo": 27137, "bernardo</w>": 28696, "bernardokath</w>": 29081, "bernat": 40578, "berni": 18798, "bernie": 40093, "bernie</w>": 10503, "berniesanders</w>": 23745, "bernstein</w>": 33936, "berra</w>": 15089, "berries</w>": 8319, "berry": 15334, "berry</w>": 3488, "bers</w>": 6408, "berser": 39037, "bert": 17340, "bert</w>": 2358, "berta</w>": 45187, "berth</w>": 28317, "bertie</w>": 47182, "berto</w>": 34073, "bertr": 36962, "bertrand</w>": 41594, "berts</w>": 30205, "berty</w>": 35973, "berwick</w>": 40407, "bery</w>": 11411, "bes": 26911, "bes</w>": 3635, "beside</w>": 13519, "besides</w>": 17596, "bespoke</w>": 15612, "bess</w>": 43791, "best": 3419, "best</w>": 949, "bestbuy</w>": 29749, "bestest</w>": 31199, "bestfan": 23880, "bestfanarmy</w>": 24590, "bestfriend": 29832, "bestfriend</w>": 11856, "bestfriends</w>": 23555, "besti": 35210, "bestie</w>": 17188, "besties</w>": 27346, "besto": 28615, "bestof": 27892, "bestof</w>": 39533, "bestseller</w>": 25841, "bestselling</w>": 28632, "bet": 1051, "bet</w>": 4430, "beta": 43188, "beta</w>": 9505, "betes</w>": 10255, "beth": 9993, "beth</w>": 4892, "bethan": 18781, "bethany": 39130, "bethany</w>": 27952, "bethe": 12624, "bethel</w>": 33410, "bethesda</w>": 32527, "bethle": 30760, "bethlehem</w>": 31827, "betis</w>": 45590, "beto</w>": 33721, "betra": 18436, "betrayal</w>": 33171, "betrayed</w>": 35692, "bets</w>": 17107, "betsy</w>": 28946, "bett": 17715, "bett</w>": 20489, "betta</w>": 36387, "bette</w>": 35855, "better": 10320, "better</w>": 1539, "bettertogether</w>": 47392, "betting</w>": 14319, "betts</w>": 38637, "betty": 36175, "betty</w>": 14350, "between</w>": 1957, "beu": 38660, "bev": 40324, "bev</w>": 30968, "bever": 9924, "beverage</w>": 18694, "beverages</w>": 28521, 
"beverley</w>": 39165, "beverly": 30906, "beverly</w>": 16728, "beverlyhills</w>": 45363, "beware</w>": 14532, "bewithyou</w>": 36787, "bex": 18676, "bex</w>": 24748, "bexhill</w>": 49200, "bey": 3234, "bey</w>": 6767, "beyon": 11447, "beyonce</w>": 16632, "beyoncé</w>": 19219, "beyond": 22246, "beyond</w>": 4432, "bez": 28592, "bez</w>": 46764, "bezos</w>": 45000, "bf": 19858, "bf</w>": 7990, "bfc</w>": 37183, "bff</w>": 11984, "bffs</w>": 31462, "bfi</w>": 34244, "bg": 16674, "bg</w>": 11295, "bgc</w>": 47598, "bgs</w>": 47963, "bgt</w>": 40665, "bh": 9930, "bh</w>": 13603, "bha": 6144, "bha</w>": 33068, "bhafc</w>": 30779, "bhagat</w>": 49136, "bhai": 48370, "bhai</w>": 20508, "bhak": 34501, "bham": 31874, "bham</w>": 23491, "bhan": 27356, "bhand": 48679, "bhar": 9108, "bharat": 27454, "bharat</w>": 17430, "bharti</w>": 46803, "bhat": 23784, "bhatt</w>": 36143, "bhav": 44950, "bhi": 28943, "bhi</w>": 21955, "bhk</w>": 45070, "bhm</w>": 38741, "bho": 19721, "bhopal</w>": 44573, "bhp</w>": 29776, "bhs</w>": 29195, "bhu": 9172, "bhuban": 38729, "bhubanes": 41213, "bhubaneswar</w>": 45888, "bhushan</w>": 40884, "bhutan</w>": 32391, "bhutto</w>": 30153, "bi": 717, "bi</w>": 3035, "bia</w>": 3841, "biaf": 26961, "biafra</w>": 36355, "bian": 19531, "bian</w>": 9027, "bianca</w>": 25854, "bianchi</w>": 45720, "bians</w>": 28141, "bias</w>": 11268, "biased</w>": 22178, "bib": 44607, "bib</w>": 21022, "bibi</w>": 31182, "bibl": 20912, "bible": 26738, "bible</w>": 7583, "bibli": 23465, "biblical</w>": 22841, "biblio": 49131, "bic": 5960, "bic</w>": 10675, "bice": 35589, "biceps</w>": 46735, "bick": 27238, "bicy": 9247, "bicycle</w>": 11652, "bicycles</w>": 31326, "bid": 21035, "bid</w>": 5553, "bidding</w>": 23237, "bide</w>": 45178, "biden</w>": 19451, "bids</w>": 16148, "bie": 5561, "bie</w>": 4173, "bieber": 48725, "bieber</w>": 7535, "bien": 19176, "bien</w>": 25742, "biennale</w>": 33776, "biennial</w>": 36609, "bier": 27226, "bier</w>": 23508, "bies</w>": 7867, "big": 1915, "big</w>": 1205, "bigbaldhead</w>": 30325, "bigbang": 41680, "bigbang</w>": 23734, "bigdata</w>": 9440, "bige": 37762, "bigfoot</w>": 37095, "bigg": 15312, "bigg</w>": 35399, "biggboss</w>": 27056, "bigger</w>": 6806, "biggest": 19483, "biggest</w>": 3505, "biggie</w>": 28392, "biggs</w>": 46507, "bigh": 18106, "bighit</w>": 35508, "bigo": 14278, "bigolive</w>": 20735, "bigotry</w>": 37269, "bigre": 36330, "bih</w>": 33471, "bihar</w>": 22849, "bij": 42478, "bik": 30306, "bike": 11686, "bike</w>": 3701, "biker": 36100, "biker</w>": 23449, "bikers</w>": 29468, "bikes</w>": 9227, "bikin": 12638, "biking</w>": 19157, "bikini</w>": 14531, "bil": 3092, "bil</w>": 20506, "bilateral</w>": 25599, "bilbao</w>": 34802, "bild</w>": 35512, "bile</w>": 25943, "bilingual</w>": 29623, "bilities</w>": 13582, "bility</w>": 4694, "bill": 4444, "bill</w>": 2886, "billboard</w>": 10856, "billboards</w>": 34741, "billed</w>": 37558, "billi": 7693, "billie</w>": 23990, "billing</w>": 31797, "billings</w>": 43615, "billion": 14520, "billion</w>": 5729, "billionaire</w>": 19475, "billionaires</w>": 41590, "billions</w>": 20742, "bills</w>": 9810, "billsmafia</w>": 48845, "billy": 15626, "billy</w>": 6814, "bilt": 44770, "bilt</w>": 26654, "bim": 46737, "bim</w>": 24775, "bin": 4849, "bin</w>": 5346, "binance</w>": 43520, "binary</w>": 23497, "bind</w>": 44513, "binder</w>": 30541, "binding</w>": 21287, "bine</w>": 34848, "bing": 24818, "bing</w>": 5665, "binge</w>": 22600, "bingham": 43785, "bingham</w>": 47296, "bingo</w>": 18418, "bino": 
29172, "bino</w>": 24313, "bins</w>": 26934, "bint": 43647, "bio": 2830, "bio</w>": 5162, "biode": 43502, "biodegradable</w>": 47740, "biodiversity</w>": 17428, "biof": 45158, "biographical</w>": 49232, "biography</w>": 15423, "biological</w>": 18821, "biologist</w>": 35149, "biology</w>": 9796, "biom": 13010, "biomar": 44549, "biomass</w>": 36746, "biome": 26218, "biomed": 29280, "biomedical</w>": 33117, "bionic</w>": 46201, "biop": 15009, "biopic</w>": 27942, "bios</w>": 48505, "biotech</w>": 22514, "biotechnology</w>": 40375, "biotic</w>": 33773, "biotics</w>": 41371, "bious</w>": 31845, "bipartisan</w>": 32266, "bipolar</w>": 37097, "bique</w>": 27809, "bir": 921, "bir</w>": 16284, "birch": 31569, "birch</w>": 22907, "bird": 6908, "bird</w>": 3329, "birdie</w>": 29612, "birdies</w>": 45618, "birding</w>": 15851, "birdman</w>": 41915, "birdphotography</w>": 47999, "birds": 41951, "birds</w>": 4337, "birdwatching</w>": 33497, "birk": 48289, "birken": 40661, "birmin": 37482, "birmingham": 38580, "birmingham</w>": 7720, "birth": 1128, "birth</w>": 5397, "birthday": 7381, "birthday</w>": 1166, "birthdays</w>": 17954, "birthplace</w>": 31429, "biryani</w>": 46489, "bis": 5064, "bis</w>": 14461, "biscu": 11532, "biscuit</w>": 18731, "biscuits</w>": 18248, "bisexual</w>": 36829, "bish": 33690, "bish</w>": 31461, "bishop": 20625, "bishop</w>": 8024, "bishops</w>": 31579, "bison</w>": 19741, "bistro</w>": 21770, "bit": 3010, "bit</w>": 2010, "bitcoin": 30848, "bitcoin</w>": 6366, "bite</w>": 41613, "biting</w>": 23016, "bits</w>": 7747, "bitt</w>": 39251, "bius</w>": 45525, "bix": 46579, "biz": 8212, "biz</w>": 5431, "biza</w>": 47013, "bizar": 14886, "bizarre</w>": 16965, "bizhour</w>": 39462, "bizitalk</w>": 34929, "bj": 4592, "bj</w>": 18229, "bjj</w>": 27437, "bjor": 26525, "bjp": 37264, "bjp</w>": 6178, "bk": 15099, "bk</w>": 14083, "bkk</w>": 36433, "bl": 833, "bl</w>": 9467, "bla": 2205, "bla</w>": 19630, "blac": 21008, "black": 2025, "black</w>": 1449, "blackand": 12809, "blackandwhite": 23688, "blackandwhite</w>": 19506, "blackandwhitephotography</w>": 27544, "blackberry</w>": 16470, "blackbird</w>": 38526, "blackburn</w>": 23789, "blackfish</w>": 42193, "blackfriday</w>": 16445, "blackgirl": 43591, "blackhawks</w>": 19203, "blackhistory</w>": 46982, "blackhistorymonth</w>": 20135, "blacklist</w>": 30295, "blacklivesmatter</w>": 23467, "blackmail</w>": 47295, "blackops</w>": 43519, "blackout</w>": 21733, "blackpanther</w>": 36592, "blackpink</w>": 20339, "blackpool</w>": 21031, "blacks</w>": 16351, "blackwell</w>": 42642, "blad</w>": 36635, "bladder</w>": 33593, "blade</w>": 10264, "blades</w>": 16893, "blah</w>": 29212, "blaine</w>": 32457, "blair": 31824, "blair</w>": 14749, "blake": 20229, "blake</w>": 9579, "blame</w>": 10695, "blamed</w>": 32906, "blames</w>": 27841, "blaming</w>": 29287, "blan": 4609, "blanc": 30936, "blanc</w>": 13301, "blanca</w>": 40670, "blanchard</w>": 40177, "blanche</w>": 34875, "blanchett</w>": 49378, "blanco</w>": 26801, "bland": 44372, "bland</w>": 30799, "blank</w>": 15134, "blanket</w>": 12878, "blankets</w>": 24042, "blanks</w>": 48599, "blasio</w>": 35553, "blasphe": 36622, "blast": 46349, "blast</w>": 5964, "blasted</w>": 38976, "blaster</w>": 36341, "blasting</w>": 26178, "blasts</w>": 23067, "blat": 22048, "blatant</w>": 41391, "blatt</w>": 39138, "blau": 45307, "blaz": 43413, "blaze</w>": 15497, "blazer</w>": 17606, "blazers</w>": 16984, "blazing</w>": 25267, "bldg</w>": 22981, "ble": 1447, "ble</w>": 1059, "bleach</w>": 27034, "bleak</w>": 
40355, "bled</w>": 12006, "bleed": 23027, "bleed</w>": 24791, "bleedblue</w>": 39160, "bleeding</w>": 20311, "bleeds</w>": 47339, "blen": 25651, "blend</w>": 10780, "blended</w>": 25813, "blender</w>": 25066, "blending</w>": 34307, "blends</w>": 28572, "bler": 31305, "bler</w>": 11979, "blers</w>": 26930, "bles</w>": 5763, "bless": 9640, "bless</w>": 5387, "blessed</w>": 4411, "blessing</w>": 10729, "blessings</w>": 11185, "bleu</w>": 30114, "blew</w>": 18176, "bley</w>": 43176, "bli": 1450, "bli</w>": 28051, "blin": 9678, "blin</w>": 5406, "blind": 17248, "blind</w>": 8351, "blinded</w>": 49149, "blindness</w>": 38812, "blinds</w>": 32449, "bling": 39764, "bling</w>": 7097, "blink</w>": 18976, "bliss": 28531, "bliss</w>": 12893, "blissful</w>": 42145, "blit": 39327, "blitz": 42151, "blitz</w>": 17548, "blizz": 13075, "blizzard</w>": 16111, "blk": 42950, "blk</w>": 22872, "blm</w>": 30957, "bln</w>": 47348, "blo": 1204, "blo</w>": 25505, "blob</w>": 49312, "bloc</w>": 30961, "block": 4638, "block</w>": 4593, "blockade</w>": 33489, "blockbuster</w>": 19939, "blockchain</w>": 6653, "blocked</w>": 9106, "blocker</w>": 44767, "blocking</w>": 12652, "blocks</w>": 10113, "blog": 16376, "blog</w>": 2589, "blogg": 33282, "blogged</w>": 41380, "blogger": 21352, "blogger</w>": 7806, "bloggerrt</w>": 48898, "bloggers</w>": 11627, "blogging</w>": 18090, "blogpost</w>": 41842, "blogs</w>": 16682, "bloke</w>": 24384, "blom": 48996, "blon": 7958, "blond</w>": 32426, "blonde</w>": 10711, "blondes</w>": 45130, "blondie</w>": 39236, "bloo": 2373, "blood": 9231, "blood</w>": 3590, "blooded</w>": 41946, "bloodh": 48480, "bloods</w>": 39539, "bloody": 38568, "bloody</w>": 9468, "bloom": 7311, "bloom</w>": 10257, "bloomberg": 43109, "bloomberg</w>": 21238, "bloomfield</w>": 40342, "blooming": 45175, "blooming</w>": 19266, "bloomington</w>": 34731, "blooms</w>": 21439, "bloss": 10017, "blossom</w>": 14472, "blossoms</w>": 21916, "blot</w>": 41710, "blou": 44506, "blouse</w>": 23525, "blow": 15230, "blow</w>": 10211, "blower</w>": 25832, "blowing</w>": 12087, "blown</w>": 11848, "blowout</w>": 34857, "blows</w>": 21063, "blr</w>": 47250, "bls</w>": 39458, "blu": 1263, "blu</w>": 10273, "blue": 3829, "blue</w>": 1746, "bluebells</w>": 47150, "blueberries</w>": 29551, "blueberry</w>": 18251, "bluebird</w>": 40747, "bluec": 43194, "bluef": 41174, "bluegrass</w>": 26241, "bluejays</w>": 18684, "blueprint</w>": 30594, "blues": 17566, "blues</w>": 5159, "blueslyrix</w>": 47068, "bluet": 13469, "bluetooth</w>": 14052, "bluewave</w>": 40025, "bluff</w>": 27232, "bluffs</w>": 48844, "blum</w>": 34818, "blumen": 38714, "blun": 34472, "blunt</w>": 19305, "blur": 12102, "blur</w>": 27976, "bluray</w>": 36818, "blurred</w>": 38013, "blurry</w>": 21977, "blush</w>": 22889, "blvd</w>": 12578, "bly": 20930, "bly</w>": 4426, "bm": 4773, "bm</w>": 15916, "bma</w>": 42573, "bmc</w>": 27807, "bmi</w>": 40642, "bmo</w>": 39083, "bms</w>": 34074, "bmw": 26637, "bmw</w>": 7869, "bmx</w>": 22535, "bn": 10496, "bn</w>": 7992, "bnb</w>": 20010, "bnha</w>": 49336, "bnp</w>": 47910, "bnw</w>": 35903, "bo": 647, "bo</w>": 2525, "boa</w>": 14732, "boar": 7837, "boar</w>": 35473, "board": 10419, "board</w>": 1972, "boarded</w>": 43052, "boarder</w>": 37414, "boardgame</w>": 47829, "boardgames</w>": 32646, "boarding</w>": 10086, "boardroom</w>": 47937, "boards</w>": 7963, "boardwalk</w>": 29043, "boast</w>": 44467, "boasts</w>": 30309, "boat": 12426, "boat</w>": 4440, "boath": 45461, "boating</w>": 21951, "boats</w>": 10080, "boatsales</w>": 
46244, "bob": 8444, "bob</w>": 4423, "boba</w>": 39948, "bobb": 16891, "bobble</w>": 38796, "bobblehead</w>": 33451, "bobby": 17847, "bobby</w>": 7816, "bobc": 26153, "bobcat</w>": 37896, "bobcats</w>": 27568, "bobo</w>": 38939, "bobs</w>": 45533, "boc": 27307, "boc</w>": 39042, "boca</w>": 26094, "bock</w>": 24961, "bod": 17904, "bod</w>": 26340, "boda</w>": 42030, "bode": 28452, "bode</w>": 40429, "bodega</w>": 47350, "bodied</w>": 36892, "bodies</w>": 9799, "bodily</w>": 49119, "body": 7132, "body</w>": 1774, "bodybuilding</w>": 24538, "bodyguard</w>": 35565, "boe": 23476, "boe</w>": 21773, "boeh": 38002, "boehner</w>": 44599, "boeing": 48135, "boeing</w>": 11857, "boer": 44889, "boer</w>": 40768, "bog": 23426, "bog</w>": 28318, "bogo</w>": 35769, "bogota</w>": 47059, "bogus</w>": 42907, "boh": 43238, "bohe": 40541, "bohemi": 21552, "bohemian</w>": 25753, "boho</w>": 25444, "boi": 37129, "boi</w>": 12673, "boil</w>": 31332, "boiled</w>": 23886, "boiler": 28212, "boiler</w>": 25615, "boiling</w>": 32019, "bois": 47742, "bois</w>": 21640, "boise</w>": 23304, "bok": 26671, "bok</w>": 15289, "boko</w>": 30929, "boks</w>": 40216, "bol": 2860, "bol</w>": 8413, "bola</w>": 12840, "bold": 26975, "bold</w>": 8911, "boldand": 48413, "boldly</w>": 44778, "boli": 12722, "bolic</w>": 27343, "bolivia</w>": 28628, "bollah</w>": 36336, "bolly": 25302, "bollywood": 32448, "bollywood</w>": 9604, "bolo</w>": 40236, "bolog": 22818, "bologna</w>": 27513, "bolster</w>": 47304, "bolt</w>": 13131, "bolton": 48757, "bolton</w>": 16598, "bolts</w>": 26028, "bom": 3012, "bom</w>": 19469, "bomb": 18091, "bomb</w>": 6331, "bombar": 25544, "bombardier</w>": 42700, "bombay": 48602, "bombay</w>": 23890, "bombed</w>": 24542, "bomber</w>": 15436, "bombers</w>": 21786, "bombing</w>": 14475, "bombings</w>": 43236, "bombs</w>": 14410, "bombshell</w>": 36340, "bon": 1871, "bon</w>": 4216, "bona</w>": 33342, "bonanza</w>": 40304, "bond": 37022, "bond</w>": 6826, "bonded</w>": 37390, "bondi</w>": 40092, "bonding</w>": 19609, "bonds</w>": 15786, "bone": 22502, "bone</w>": 6195, "bones</w>": 9476, "bonfire</w>": 23151, "bongo</w>": 47519, "boni": 32269, "boni</w>": 46356, "bonita</w>": 42896, "bonjour</w>": 33176, "bonkers</w>": 39865, "bonn</w>": 38969, "bonnar": 47191, "bonnaroo</w>": 48777, "bonne</w>": 25844, "bonnet</w>": 30636, "bonnie</w>": 18555, "bono</w>": 24476, "bons</w>": 42883, "bonsai</w>": 44129, "bonus</w>": 8164, "bonuses</w>": 35144, "boo": 824, "boo</w>": 7317, "boogie</w>": 22639, "book": 2828, "book</w>": 1116, "bookboost</w>": 31257, "bookclub</w>": 34438, "bookday</w>": 26327, "booked</w>": 12584, "booker</w>": 21302, "bookfest</w>": 39381, "booking</w>": 10145, "bookings</w>": 18345, "booklet</w>": 27405, "bookmark</w>": 33596, "bookof": 45629, "bookreview</w>": 27362, "books": 44382, "books</w>": 2161, "bookshelf</w>": 34821, "bookshop</w>": 24705, "bookstore</w>": 17999, "bookstores</w>": 46416, "bookworm</w>": 20743, "boom": 9609, "boom</w>": 7121, "boomer": 33819, "boomer</w>": 31766, "boomers</w>": 37988, "booming</w>": 33487, "boon": 24979, "boon</w>": 35821, "boone</w>": 23453, "boop</w>": 45047, "boost": 44639, "boost</w>": 6260, "boosted</w>": 37631, "booster</w>": 20877, "boosters</w>": 46859, "boosting</w>": 28480, "boosts</w>": 29247, "boot": 10843, "boot</w>": 8087, "bootcamp</w>": 22051, "booted</w>": 42564, "booth": 47895, "booth</w>": 3971, "booths</w>": 32653, "booties</w>": 46188, "bootleg</w>": 38139, "boots</w>": 7319, "booze</w>": 24341, "bop</w>": 19720, "bor": 1141, "bor</w>": 
15093, "bora</w>": 24736, "bord</w>": 36891, "bordeaux</w>": 22009, "border": 16304, "border</w>": 6177, "borderlands</w>": 38676, "borders</w>": 13900, "bore": 14084, "bore</w>": 24638, "bored</w>": 8933, "boredom</w>": 31460, "boretum</w>": 38902, "borg</w>": 14770, "borgh": 17180, "boring</w>": 12519, "boris": 31212, "boris</w>": 15704, "borisjohnson</w>": 44481, "born": 17695, "born</w>": 2683, "borne": 42910, "borne</w>": 9328, "borneo</w>": 33332, "bornon": 41811, "bornonthisday</w>": 42757, "boro": 26796, "boro</w>": 7974, "borough": 22761, "borough</w>": 6203, "borrow</w>": 22293, "borrowed</w>": 28224, "borrowing</w>": 41045, "borussia</w>": 36764, "bos": 14885, "bos</w>": 9644, "bosa</w>": 46946, "bosch": 42009, "bosch</w>": 19466, "bosco</w>": 36960, "bose</w>": 23142, "bosh</w>": 42244, "bosni": 42924, "bosnia</w>": 31396, "boss": 17935, "boss</w>": 4206, "bosses</w>": 23906, "boston": 11540, "boston</w>": 4399, "bostonmarathon</w>": 44533, "bot": 4136, "bot</w>": 6947, "botan": 12554, "botanic</w>": 32560, "botanical</w>": 21026, "botany</w>": 22612, "botd</w>": 34451, "both": 36575, "both</w>": 2212, "bother</w>": 21125, "bothered</w>": 27997, "botox</w>": 43449, "bots</w>": 13721, "botswana</w>": 27584, "bott": 3520, "bott</w>": 37225, "bottle": 37306, "bottle</w>": 5392, "bottled</w>": 29331, "bottlen": 46439, "bottles</w>": 9754, "bottling</w>": 42006, "bottom": 32314, "bottom</w>": 5931, "bottoms</w>": 31524, "bou": 3728, "bou</w>": 23165, "bouchard</w>": 47930, "boudo": 48827, "bought</w>": 4142, "boul": 24830, "boulder</w>": 18260, "boule": 17652, "boulevard</w>": 19504, "boun": 5993, "bounce</w>": 14316, "bouncing</w>": 32060, "bouncy</w>": 43415, "bound": 15140, "bound</w>": 4567, "boundaries</w>": 18690, "boundary</w>": 21344, "bounds</w>": 37469, "bounty</w>": 21142, "bouquet</w>": 20961, "bour": 2934, "bour</w>": 35486, "bourbon": 48118, "bourbon</w>": 14652, "bourdain</w>": 48095, "bourg</w>": 20690, "bourgeo": 45672, "bourn</w>": 39143, "bourne": 13789, "bourne</w>": 5192, "bournemouth</w>": 20911, "bout": 19982, "bout</w>": 8123, "bouti": 10926, "boutique</w>": 12179, "bow": 2297, "bow</w>": 4040, "bowden</w>": 48538, "bowed</w>": 49130, "bowel</w>": 36880, "bowen</w>": 25368, "bower</w>": 40414, "bowers</w>": 42238, "bowie</w>": 13036, "bowing</w>": 46398, "bowl": 26719, "bowl</w>": 3814, "bowled</w>": 39987, "bowler</w>": 25528, "bowlers</w>": 42632, "bowles</w>": 41611, "bowling</w>": 10390, "bowls</w>": 17787, "bowman</w>": 22052, "bows</w>": 17000, "bowser</w>": 38234, "bowski</w>": 48311, "box": 2774, "box</w>": 2063, "boxed</w>": 24190, "boxer": 40394, "boxer</w>": 15363, "boxers</w>": 31019, "boxes</w>": 8350, "boxing": 33669, "boxing</w>": 5554, "boy": 2927, "boy</w>": 1876, "boyband</w>": 31568, "boyce</w>": 44480, "boycot": 46208, "boycott": 31615, "boycott</w>": 19559, "boyd</w>": 18295, "boyfriend</w>": 7328, "boyfriends</w>": 36541, "boyle</w>": 22802, "boys": 25223, "boys</w>": 2034, "boyz</w>": 16152, "bp": 23410, "bp</w>": 11558, "bpa</w>": 43855, "bpd</w>": 48587, "bpl</w>": 28901, "bpm</w>": 40338, "bps</w>": 37794, "br": 711, "br</w>": 7532, "bra": 1195, "bra</w>": 5860, "brac": 6663, "brace": 8376, "brace</w>": 9183, "bracelet</w>": 8969, "bracelets</w>": 20027, "braces</w>": 19249, "brack": 25676, "bracket</w>": 14780, "brackets</w>": 36183, "brad": 4848, "brad</w>": 9405, "bradbury</w>": 45097, "braden</w>": 46842, "bradford</w>": 15062, "bradley": 31905, "bradley</w>": 10952, "brador</w>": 24062, "bradshaw</w>": 37556, "brady": 42494, 
"brady</w>": 11117, "brae": 42874, "brae</w>": 40040, "brag</w>": 30110, "bragg</w>": 38545, "bragging</w>": 38199, "brah": 20276, "brahms</w>": 45114, "brai": 25048, "braid</w>": 31067, "braided</w>": 39997, "braids</w>": 34221, "brain": 9454, "brain</w>": 4812, "brains</w>": 17129, "brainstorming</w>": 36607, "braised</w>": 28363, "brake</w>": 14937, "brakes</w>": 23456, "bral</w>": 31309, "bram": 14815, "bram</w>": 39456, "brampton</w>": 35124, "bran": 3684, "bran</w>": 28348, "brance": 36072, "brance</w>": 15413, "branch</w>": 7998, "branches</w>": 15843, "brand": 3910, "brand</w>": 2896, "branded</w>": 18097, "brandi</w>": 41003, "branding</w>": 10841, "brando</w>": 41892, "brandon": 20423, "brandon</w>": 9166, "brands</w>": 8681, "brandt</w>": 22552, "brandy</w>": 26232, "brane</w>": 32340, "branson</w>": 28280, "brant": 28951, "brant</w>": 47592, "braries</w>": 46377, "brary</w>": 24520, "bras</w>": 22611, "brasil</w>": 18991, "brass": 24348, "brass</w>": 11655, "brat": 26717, "brat</w>": 26631, "brate</w>": 41864, "braun": 39129, "braun</w>": 29309, "brave": 25461, "brave</w>": 7769, "braved</w>": 47663, "bravely</w>": 42303, "bravery</w>": 25831, "braves</w>": 14422, "braving</w>": 43258, "bravo": 38613, "bravo</w>": 13006, "braw": 37871, "brawl</w>": 26066, "braxton</w>": 37451, "bray": 26256, "bray</w>": 22993, "braz": 4625, "brazil": 47459, "brazil</w>": 6305, "brazili": 45697, "brazilian</w>": 12111, "brb</w>": 25316, "brc</w>": 40393, "bre": 887, "bre</w>": 7782, "brea": 7318, "brea</w>": 46538, "breach</w>": 21363, "breaches</w>": 45173, "bread": 18886, "bread</w>": 5066, "breads</w>": 43064, "break": 2206, "break</w>": 2568, "breakable</w>": 30691, "breakaway</w>": 42732, "breakdown</w>": 14519, "breaker</w>": 14814, "breakers</w>": 22270, "breakfa": 45931, "breakfast": 30210, "breakfast</w>": 3290, "breaking": 14698, "breaking</w>": 2755, "breakingbad</w>": 38032, "breakingnews</w>": 23837, "breakout</w>": 16752, "breaks</w>": 7263, "breakthrough</w>": 18802, "breakup</w>": 38931, "breast": 12930, "breast</w>": 9475, "breastcancer": 40813, "breastcancer</w>": 30065, "breastfeeding</w>": 29033, "breasts</w>": 37637, "breath": 9508, "breath</w>": 9576, "breathe</w>": 11364, "breathing</w>": 14959, "breathtaking</w>": 14709, "brecht</w>": 34622, "breck": 44598, "bred": 46929, "bred</w>": 16008, "bree": 7892, "bree</w>": 37138, "breed": 28030, "breed</w>": 13791, "breeders</w>": 37472, "breeding</w>": 16544, "breeds</w>": 29021, "breen</w>": 48013, "brees</w>": 46721, "breeze</w>": 13125, "breezy</w>": 21451, "breit": 23864, "breitbart</w>": 37926, "brek": 35494, "bremen</w>": 39861, "bren": 5209, "brenda</w>": 23786, "brendan": 35134, "brendan</w>": 15414, "brendon</w>": 36756, "brennan</w>": 22372, "brenner</w>": 42941, "brent": 31439, "brent</w>": 16355, "brentwood</w>": 33108, "brero</w>": 47781, "bres</w>": 32561, "bret": 38020, "bret</w>": 32548, "brethren</w>": 43134, "breton</w>": 32290, "brett": 22591, "brett</w>": 12394, "brev</w>": 42882, "brevi": 39475, "brew": 5048, "brew</w>": 7253, "brewco</w>": 33582, "brewed</w>": 23238, "brewer</w>": 20756, "breweries</w>": 35277, "brewers</w>": 17618, "brewery</w>": 8850, "brewing</w>": 8275, "brewingco</w>": 45155, "brews</w>": 21663, "brewster</w>": 40274, "brex": 22726, "brexit": 27666, "brexit</w>": 5801, "brgy</w>": 35983, "bri": 1036, "bri</w>": 18636, "bria</w>": 35890, "brian": 9824, "brian</w>": 4989, "brianna</w>": 32308, "briar</w>": 46119, "bribe</w>": 40042, "bribery</w>": 41792, "bric": 27055, "brice</w>": 
40190, "brick": 13937, "brick</w>": 9518, "bricks</w>": 21029, "brics</w>": 48196, "brid": 16995, "bridal": 36875, "bridal</w>": 14284, "bride": 18342, "bride</w>": 8964, "brides</w>": 18067, "bridesma": 28356, "bridesmaid</w>": 43399, "bridesmaids</w>": 47754, "bridg": 20623, "bridge": 8647, "bridge</w>": 2465, "bridgeport</w>": 45201, "bridges</w>": 11811, "bridget</w>": 27073, "bridgewater</w>": 38732, "bridging</w>": 38109, "brie</w>": 26622, "brief": 9435, "brief</w>": 8954, "briefed</w>": 47326, "briefing</w>": 12991, "briefly</w>": 26980, "briefs</w>": 29557, "brien</w>": 13504, "brier</w>": 43995, "brig": 11081, "briga": 46448, "brigade</w>": 16032, "briggs</w>": 28108, "brigh": 6710, "bright": 10383, "bright</w>": 4852, "brighten</w>": 18208, "brightening</w>": 43929, "brighter</w>": 18507, "brightest</w>": 26159, "brightly</w>": 36298, "brightness</w>": 42280, "brighton": 28416, "brighton</w>": 9470, "brigitte</w>": 44421, "brill": 27342, "brill</w>": 28601, "brilli": 3821, "brilliance</w>": 28146, "brilliant</w>": 4106, "brilliantly</w>": 26803, "brin": 25620, "bring": 11596, "bring</w>": 2430, "bringback": 28969, "bringbackour": 45403, "bringing</w>": 4777, "brings</w>": 5138, "brink": 39296, "brink</w>": 28796, "brioche</w>": 45818, "bris": 9385, "bris</w>": 15783, "brisban": 30431, "brisbane": 42932, "brisbane</w>": 12407, "brisk</w>": 43646, "brisket</w>": 31920, "bristol": 18159, "bristol</w>": 8010, "brit": 2318, "brit</w>": 20066, "britain": 40802, "britain</w>": 6272, "britanni": 31373, "britannia</w>": 36188, "brite</w>": 33827, "briti": 8155, "british": 8651, "british</w>": 3504, "britishmuseum</w>": 41858, "britney": 37192, "britney</w>": 21853, "britneyspears</w>": 42990, "brits</w>": 21832, "britt": 10811, "britt</w>": 25976, "brittany": 38187, "brittany</w>": 18818, "britton</w>": 37422, "brium</w>": 46079, "brixton</w>": 30056, "bro": 927, "bro</w>": 4410, "broad": 3491, "broad</w>": 12623, "broadband</w>": 21050, "broadcast</w>": 8967, "broadcaster</w>": 29005, "broadcasting</w>": 14403, "broadcasts</w>": 46742, "broader</w>": 36029, "broadway": 34599, "broadway</w>": 9092, "broc": 15587, "broccoli</w>": 19094, "broch": 21419, "brochure</w>": 25275, "brock": 14841, "brock</w>": 16745, "brodie</w>": 42150, "brody</w>": 29608, "broke": 42165, "broke</w>": 6509, "broken": 26126, "broken</w>": 5107, "broker": 34032, "broker</w>": 20449, "brokerage</w>": 41327, "brokers</w>": 28271, "brom": 18972, "brom</w>": 33296, "bromance</w>": 35353, "bromley</w>": 35715, "bron": 4011, "bron</w>": 10243, "bronco": 43488, "bronco</w>": 34370, "broncos</w>": 12516, "bronson</w>": 37042, "bronte</w>": 48936, "bronx": 48310, "bronx</w>": 17183, "brony</w>": 21084, "bronze</w>": 8459, "broo": 5204, "brooch</w>": 21207, "brook": 4782, "brook</w>": 7322, "brooke": 28576, "brooke</w>": 12549, "brookes</w>": 39707, "brooklyn": 23253, "brooklyn</w>": 6983, "brooks": 42779, "brooks</w>": 9991, "broom": 32046, "broom</w>": 28008, "broome</w>": 49335, "bros</w>": 7776, "broth</w>": 29994, "brotha</w>": 33974, "brother": 12697, "brother</w>": 3157, "brotherhood</w>": 19059, "brothers</w>": 4548, "brou": 27874, "brough</w>": 21033, "brought</w>": 4222, "brov": 42881, "brow": 6547, "brow</w>": 15895, "broward</w>": 34719, "brown": 6315, "brown</w>": 2866, "browne</w>": 28440, "brownie</w>": 23045, "brownies</w>": 22312, "browning</w>": 32241, "browns</w>": 14051, "brows</w>": 14998, "browse</w>": 19060, "browser</w>": 19768, "browsing</w>": 29318, "brox</w>": 43539, "brs</w>": 47485, "brt</w>": 
46936, "bru": 1698, "bru</w>": 31028, "bruce": 21223, "bruce</w>": 7085, "bruh</w>": 17575, "bruins</w>": 14736, "bruise</w>": 48048, "bruised</w>": 46502, "brum": 23862, "brum</w>": 28078, "brun": 6870, "brunch</w>": 9113, "brune": 29057, "brunei</w>": 41898, "brunette</w>": 35528, "bruno</w>": 14568, "brunomars</w>": 41156, "brunswick</w>": 24012, "brush": 27969, "brush</w>": 8594, "brushed</w>": 30298, "brushes</w>": 21550, "brushing</w>": 35072, "brussels</w>": 11020, "brut</w>": 39499, "brutal": 42144, "brutal</w>": 14556, "brutality</w>": 31348, "brutally</w>": 28132, "brute</w>": 47552, "brux": 49093, "bry": 6587, "bry</w>": 28228, "bryan": 16134, "bryan</w>": 10412, "bryant</w>": 12256, "bryce</w>": 19895, "bryn": 36569, "bryn</w>": 42877, "bryson</w>": 38990, "bs": 11783, "bs</w>": 1329, "bsa</w>": 46619, "bsb</w>": 23070, "bsbi": 41728, "bsbibotany</w>": 42086, "bsc</w>": 32031, "bsd</w>": 41848, "bse</w>": 46341, "bsf</w>": 48314, "bsgo</w>": 48474, "bsp</w>": 47977, "bst</w>": 19698, "bsu</w>": 46385, "bt": 3317, "bt</w>": 4205, "btc</w>": 10315, "btcc</w>": 30759, "btn</w>": 44681, "bto</w>": 35516, "btob</w>": 29379, "btr</w>": 39767, "bts": 15154, "bts</w>": 4007, "btsarmy</w>": 30302, "btsbbmas</w>": 35297, "btsx": 44971, "btv</w>": 38541, "btw</w>": 9520, "btwn</w>": 28284, "bu": 609, "bu</w>": 5831, "bub": 27704, "bub</w>": 33158, "bubb": 9739, "bubba</w>": 28149, "bubble": 28687, "bubble</w>": 10799, "bubblegum</w>": 48078, "bubbles</w>": 17648, "bubbly</w>": 31034, "buc": 8207, "buccane": 32830, "buccaneers</w>": 38058, "buch": 22623, "bucha</w>": 43582, "buchan": 27237, "buchanan</w>": 28975, "bucharest</w>": 37013, "buck": 6061, "buck</w>": 11433, "bucket": 22596, "bucket</w>": 10498, "bucketlist</w>": 30778, "buckets</w>": 27168, "buckeye</w>": 34549, "buckeyes</w>": 30741, "buckingham</w>": 28736, "buckle</w>": 21948, "buckley</w>": 25905, "bucks</w>": 6103, "bucky</w>": 35916, "bucs</w>": 20011, "bud": 2942, "bud</w>": 10737, "buda": 18520, "buda</w>": 49012, "budapest</w>": 19202, "budd": 7296, "buddha</w>": 13981, "buddhism</w>": 23744, "buddhist</w>": 18697, "buddies</w>": 14543, "budding</w>": 31992, "buddy": 40948, "buddy</w>": 6557, "budge": 32005, "budget": 46758, "budget</w>": 5639, "budgeting</w>": 43789, "budgets</w>": 36419, "buds</w>": 14665, "budweiser</w>": 40900, "buen": 15640, "buena</w>": 30876, "buenas</w>": 48529, "bueno</w>": 46202, "buenos</w>": 26055, "buf": 44417, "buff": 5456, "buff</w>": 21416, "buffal": 25836, "buffalo": 31231, "buffalo</w>": 8054, "buffalob": 38831, "buffalobills</w>": 44352, "buffe": 13724, "buffer</w>": 33050, "buffet</w>": 17829, "buffett</w>": 34081, "buffs</w>": 28906, "buffy</w>": 33356, "bug": 14453, "bug</w>": 8162, "bugatti</w>": 35451, "buggy</w>": 28963, "bugs</w>": 13850, "buh</w>": 31406, "buhari</w>": 14661, "buick</w>": 22000, "buil": 1354, "build": 22739, "build</w>": 3289, "builder</w>": 14474, "builders</w>": 17694, "building": 21206, "building</w>": 2307, "buildings</w>": 8866, "builds</w>": 16449, "buildthe": 41497, "built": 45824, "built</w>": 3874, "buk": 28084, "buk</w>": 24317, "buka</w>": 47778, "bukit</w>": 39888, "bul": 2572, "bul</w>": 10200, "bula</w>": 18726, "bulaga</w>": 41575, "bular</w>": 32187, "bulb</w>": 22373, "bulbs</w>": 24808, "bulgar": 15424, "bulgaria</w>": 20295, "bulgarian</w>": 38693, "bulge</w>": 47603, "bulk</w>": 19643, "bull": 4537, "bull</w>": 6029, "bulldo": 37675, "bulldog": 34828, "bulldog</w>": 15611, "bulldogs</w>": 13916, "bullet": 14340, "bullet</w>": 12465, 
"bulletin</w>": 19638, "bulletproof</w>": 43212, "bullets</w>": 22117, "bullied</w>": 34689, "bullies</w>": 39050, "bullion</w>": 49114, "bullish</w>": 22142, "bullock</w>": 33198, "bullpen</w>": 38081, "bulls</w>": 10313, "bully": 43111, "bully</w>": 20190, "bullying</w>": 13548, "bum": 27683, "bum</w>": 14226, "bumble": 25585, "bumble</w>": 39303, "bumblebee</w>": 36911, "bummed</w>": 48456, "bump": 9783, "bump</w>": 15877, "bumped</w>": 22495, "bumper</w>": 17881, "bumping</w>": 40196, "bumps</w>": 21115, "bun": 2591, "bun</w>": 13665, "bunch</w>": 7796, "bund</w>": 41905, "bunde": 18841, "bundesliga</w>": 21582, "bundle</w>": 11793, "bundled</w>": 47228, "bundles</w>": 29834, "bundy</w>": 37332, "bung</w>": 44748, "bungal": 29549, "bungalow</w>": 33696, "bunk</w>": 41236, "bunker</w>": 23615, "bunnies</w>": 28998, "bunny": 34198, "bunny</w>": 9258, "buns</w>": 22235, "bunting</w>": 30695, "buon": 31350, "buon</w>": 48498, "bur": 1039, "bur</w>": 17362, "burbank</w>": 34862, "burberry</w>": 30412, "burch</w>": 44588, "burden</w>": 18687, "bure": 11902, "bureau": 32098, "bureau</w>": 15400, "burg": 19505, "burg</w>": 3499, "burge": 20522, "burger": 22356, "burger</w>": 6548, "burgers</w>": 13007, "burgess</w>": 26211, "burgh": 18141, "burgh</w>": 4965, "burgl": 25554, "burglar</w>": 43365, "burglary</w>": 32573, "burgring</w>": 40823, "burgundy</w>": 23650, "buri": 46348, "buri</w>": 42614, "burial</w>": 22012, "buried</w>": 14233, "burk": 48822, "burke</w>": 15340, "burle": 27891, "burlesque</w>": 33732, "burlington</w>": 23370, "burma</w>": 30305, "burmese</w>": 47906, "burn": 7934, "burn</w>": 4285, "burnaby</w>": 47541, "burne</w>": 27246, "burned</w>": 15022, "burner</w>": 23243, "burnett</w>": 28558, "burnham</w>": 36111, "burning": 46107, "burning</w>": 8405, "burnley</w>": 24653, "burnout</w>": 36078, "burns</w>": 10234, "burnt</w>": 15185, "burr</w>": 30879, "burrell</w>": 49045, "burrito</w>": 23473, "burritos</w>": 47245, "burroughs</w>": 41337, "burrows</w>": 44846, "burst</w>": 13005, "bursting</w>": 32566, "bursts</w>": 37026, "burt</w>": 27162, "burton": 42354, "burton</w>": 12704, "burundi</w>": 33595, "bury": 12276, "bury</w>": 3899, "burys</w>": 32362, "bus": 1319, "bus</w>": 2840, "busan</w>": 40172, "busc": 35000, "busch</w>": 20475, "buses</w>": 12879, "bush": 11191, "bush</w>": 6867, "bushes</w>": 37578, "busiest</w>": 32764, "busine": 4598, "busines": 25364, "business": 8346, "business</w>": 1716, "businesses</w>": 7287, "businessman</w>": 25635, "buss</w>": 47764, "bust": 31299, "bust</w>": 9959, "busted</w>": 18643, "buster": 37219, "buster</w>": 12094, "busters</w>": 16362, "busting</w>": 29622, "busy": 39332, "busy</w>": 4354, "but": 2201, "but</w>": 767, "butch</w>": 35102, "butcher</w>": 18732, "butchers</w>": 42334, "bute</w>": 39240, "butes</w>": 14630, "butler": 35867, "butler</w>": 10702, "butt": 12500, "butt</w>": 31523, "butte</w>": 31678, "butter": 5427, "butter</w>": 6952, "butterflies</w>": 16232, "butterfly</w>": 9738, "buttermilk</w>": 40180, "butternut</w>": 36867, "buttery</w>": 45535, "button": 45480, "button</w>": 8007, "buttons</w>": 16188, "butts</w>": 25309, "buu": 42313, "buuren</w>": 47752, "buxton</w>": 41370, "buy": 11632, "buy</w>": 2131, "buyer</w>": 14682, "buyers</w>": 14663, "buying</w>": 6566, "buys</w>": 15560, "buzz": 7866, "buzz</w>": 8706, "buzzard</w>": 47434, "buzzer</w>": 38064, "buzzfeed</w>": 26613, "buzzing</w>": 18511, "bv": 18958, "bv</w>": 35861, "bvb</w>": 22454, "bw": 17672, "bw</w>": 15120, "bway</w>": 26652, 
"bwfc</w>": 40918, "bwo": 45902, "bx</w>": 33633, "by": 1713, "by</w>": 638, "bye": 20076, "bye</w>": 4460, "byes</w>": 47958, "byl</w>": 34994, "byn": 46917, "byn</w>": 11890, "byo": 28039, "bypass</w>": 26530, "byr": 15534, "byrd</w>": 30369, "byrne</w>": 19676, "byron": 43504, "byron</w>": 19775, "bys</w>": 26740, "bystand": 46138, "byte</w>": 42798, "bytes</w>": 39538, "bythe": 36621, "byu": 41072, "byu</w>": 23770, "byz": 35406, "byzantine</w>": 44081, "bz</w>": 13631, "bé": 40365, "bü": 38706, "c": 66, "c</w>": 322, "ca": 772, "ca</w>": 1684, "caa</w>": 19316, "cab": 3033, "cab</w>": 11912, "cabaret</w>": 26263, "cabbage</w>": 18407, "cabe</w>": 32731, "cabello</w>": 34371, "caber": 29062, "cabernet</w>": 33730, "cabin</w>": 14178, "cabine": 23354, "cabinet</w>": 9937, "cabinets</w>": 33083, "cabins</w>": 48455, "cable</w>": 7925, "cables</w>": 22408, "cabo": 37318, "cabo</w>": 28370, "cabrera</w>": 42338, "cabs</w>": 42048, "cac": 8298, "cac</w>": 23872, "cacao</w>": 38022, "cache</w>": 28993, "caching</w>": 40655, "cactus</w>": 19794, "cad": 6297, "cad</w>": 20166, "caday</w>": 34187, "cadbury</w>": 44698, "caddy</w>": 41521, "cade": 10497, "cade</w>": 17306, "cadet</w>": 22764, "cadets</w>": 19160, "cadillac</w>": 18156, "cae</w>": 49264, "caer": 28298, "caes": 15740, "caesar</w>": 21642, "caesars</w>": 42162, "caf": 3471, "caf</w>": 20867, "cafc</w>": 30748, "cafe": 15201, "cafe</w>": 4979, "cafes</w>": 40166, "cafeteria</w>": 32817, "caffe": 18258, "caffe</w>": 45416, "caffeine</w>": 22487, "café</w>": 15304, "cag": 15714, "cage</w>": 11838, "cages</w>": 37939, "cah</w>": 40519, "cahill</w>": 33185, "cai": 38971, "cai</w>": 36116, "cain</w>": 13747, "caine</w>": 16799, "cair": 15804, "cair</w>": 46659, "cairn": 31264, "cairn</w>": 42467, "cairngor": 44067, "cairns</w>": 32941, "cairo</w>": 19615, "cait": 14116, "caitlin": 47768, "caitlin</w>": 26809, "caitlyn</w>": 35763, "cajun": 43425, "cajun</w>": 33044, "cak</w>": 42986, "cake": 15295, "cake</w>": 2972, "cakeday</w>": 46207, "cakes</w>": 5950, "cal": 1198, "cal</w>": 6372, "cala</w>": 32133, "calab": 31795, "calais</w>": 39886, "calam": 28841, "calc</w>": 45055, "calci": 22824, "calcium</w>": 27815, "calcu": 15328, "calcul": 15734, "calculate</w>": 37656, "calculated</w>": 40688, "calculations</w>": 44605, "calculator</w>": 26093, "calculus</w>": 35104, "calcutta</w>": 42901, "calder": 29372, "calder</w>": 36817, "caldwell</w>": 30484, "cale</w>": 32674, "caleb</w>": 19619, "caled": 28421, "calend": 6057, "calendar</w>": 7122, "calendars</w>": 17229, "calf</w>": 17508, "calgary": 27415, "calgary</w>": 10797, "calhoun</w>": 38929, "cali": 2857, "cali</w>": 16337, "caliber</w>": 32820, "calibr": 32597, "calico</w>": 45379, "calif</w>": 30839, "califor": 3526, "californi": 21303, "california</w>": 3729, "call": 7950, "call</w>": 1620, "calla": 20658, "callahan</w>": 43313, "callaway</w>": 42596, "callback</w>": 44764, "calle": 47699, "calle</w>": 38144, "called</w>": 2726, "caller</w>": 30666, "calli": 16338, "callie</w>": 36512, "calligraphy</w>": 27775, "calling</w>": 4597, "callister</w>": 49026, "callme": 42449, "callof": 41280, "calls</w>": 4572, "callum</w>": 23224, "calm": 34990, "calm</w>": 7011, "calming</w>": 30690, "calorie</w>": 32679, "calories</w>": 18029, "cals</w>": 47714, "calum</w>": 16405, "calvary</w>": 40169, "calvert</w>": 47134, "calves</w>": 31857, "calvin": 27642, "calvin</w>": 17345, "caly": 10244, "calyp": 29851, "cam": 1004, "cam</w>": 5982, "camar": 31991, "camber": 44362, "cambo": 14662, 
"cambodia</w>": 17347, "cambridge": 24651, "cambridge</w>": 9334, "cambridgeshire</w>": 46139, "camden": 38735, "camden</w>": 17984, "came</w>": 1986, "camel": 27005, "camel</w>": 21914, "camels</w>": 41357, "cameo</w>": 19492, "camer": 4961, "camera</w>": 3934, "cameraman</w>": 43347, "cameras</w>": 12172, "camero": 20320, "cameron": 19634, "cameron</w>": 8057, "camerondallas</w>": 40587, "cameroon</w>": 24061, "camil": 37745, "camila</w>": 19919, "camilla</w>": 38897, "camille</w>": 26741, "camino</w>": 28529, "camo": 28702, "camo</w>": 19716, "camogie</w>": 39547, "camou": 23588, "camoufla": 23667, "camouflage</w>": 29049, "camp": 2854, "camp</w>": 2877, "campa": 2793, "campaig": 9448, "campaign": 44524, "campaign</w>": 3193, "campaigner</w>": 46364, "campaigners</w>": 40272, "campaigning</w>": 19594, "campaigns</w>": 15669, "campan": 31765, "campbell": 29094, "campbell</w>": 8806, "campe": 16672, "campeon": 49109, "campeones</w>": 30105, "camper": 41914, "camper</w>": 24522, "campers</w>": 26619, "campfire</w>": 32530, "campground</w>": 46969, "camping</w>": 9982, "campo</w>": 27600, "campos</w>": 48077, "camps</w>": 12806, "campsite</w>": 44243, "campu": 19687, "campus</w>": 4560, "campuses</w>": 31895, "camra</w>": 46155, "camry</w>": 46472, "cams</w>": 32590, "can": 950, "can</w>": 753, "cana</w>": 28341, "canad": 13193, "canada</w>": 2698, "canadaday</w>": 39800, "canadi": 4329, "canadian": 22160, "canadian</w>": 5255, "canadians</w>": 18989, "canadiens</w>": 40932, "canal": 28585, "canal</w>": 9535, "canals</w>": 38483, "canaria</w>": 47117, "canary": 40409, "canary</w>": 24523, "canberra</w>": 16719, "canc": 43189, "cancel": 12026, "cancel</w>": 21546, "canceled</w>": 25874, "cancell": 28027, "cancellation</w>": 38765, "cancelled</w>": 13270, "cancels</w>": 34089, "cancer": 12690, "cancer</w>": 3148, "cancers</w>": 33201, "cancun</w>": 34721, "cand": 4986, "candace</w>": 45623, "candel": 47834, "candi": 6034, "candice</w>": 30024, "candid": 7884, "candid</w>": 19206, "candidacy</w>": 46248, "candidate</w>": 6475, "candidates</w>": 8619, "candied</w>": 43982, "candies</w>": 46305, "candle": 18995, "candle</w>": 12674, "candlelight</w>": 34724, "candles</w>": 15472, "candy": 20741, "candy</w>": 6417, "cane": 23644, "cane</w>": 14716, "canelo</w>": 43210, "canes</w>": 21902, "cani": 35592, "canine</w>": 27380, "cann": 4139, "cann</w>": 23709, "cannab": 7577, "cannabis": 31837, "cannabis</w>": 8861, "canne": 44252, "canned</w>": 27290, "cannes</w>": 13773, "canni": 26389, "canning</w>": 38621, "cannon": 28771, "cannon</w>": 15661, "cannons</w>": 46269, "cannot</w>": 4785, "canny</w>": 26986, "cano": 31668, "cano</w>": 25937, "canoe</w>": 23503, "canola</w>": 40389, "canon": 17749, "canon</w>": 9310, "canopy</w>": 26061, "cans</w>": 13707, "cant": 13395, "cant</w>": 5784, "canteen</w>": 39230, "canter": 19301, "canterbury</w>": 22271, "canti": 42845, "cantina</w>": 47472, "canton": 37735, "canton</w>": 25363, "cantore</w>": 41769, "cantwait</w>": 33760, "canu": 20171, "canucks</w>": 24321, "canv": 30714, "canvas": 22441, "canvas</w>": 7483, "canvass</w>": 40054, "canvassing</w>": 33783, "cany": 47674, "canyon": 41246, "canyon</w>": 9755, "cao</w>": 29207, "cap": 1289, "cap</w>": 3938, "capabilities</w>": 19512, "capability</w>": 25885, "capable</w>": 14742, "capac": 24665, "capacity</w>": 8970, "capcom</w>": 28342, "cape": 10288, "cape</w>": 6631, "capecod</w>": 41339, "capes</w>": 38785, "capetown</w>": 20059, "capit": 6889, "capita</w>": 41833, "capital": 11198, "capital</w>": 5439, 
"capitalism</w>": 20068, "capitalist</w>": 37015, "capitals</w>": 29579, "capitol": 43880, "capitol</w>": 11375, "capo</w>": 45477, "capp": 16718, "capped</w>": 24659, "capping</w>": 42656, "cappuccino</w>": 37402, "capri": 48699, "capri</w>": 30982, "capric": 28667, "capricorn</w>": 46314, "caps</w>": 23185, "capsu": 15608, "capsul": 40341, "capsule</w>": 20627, "capsules</w>": 32870, "capt": 45815, "capt</w>": 17369, "captain": 14958, "captain</w>": 4621, "captainamerica</w>": 46229, "captainmarvel</w>": 48492, "captains</w>": 18706, "caption</w>": 11327, "captions</w>": 41878, "captiv": 19776, "captivating</w>": 30580, "captive</w>": 29038, "captivity</w>": 41141, "capture</w>": 8818, "captured</w>": 8020, "captures</w>": 15305, "capturing</w>": 19548, "capu": 44241, "car": 811, "car</w>": 1615, "cara</w>": 20016, "carab": 32251, "carac": 30029, "caracas</w>": 45854, "caramel</w>": 14788, "carameli": 41739, "caramelized</w>": 43854, "carat</w>": 32981, "carav": 13814, "caravan</w>": 18566, "carb</w>": 21379, "carbo": 43235, "carbon": 14038, "carbon</w>": 7549, "carbs</w>": 29313, "carcin": 31587, "carcinoma</w>": 46810, "card": 10793, "card</w>": 2601, "cardam": 49008, "cardboard</w>": 19845, "cardi": 6211, "cardi</w>": 29677, "cardiac</w>": 21256, "cardiff": 22488, "cardiff</w>": 9781, "cardigan</w>": 30501, "cardin": 8457, "cardinal": 46310, "cardinal</w>": 16472, "cardinals</w>": 12837, "cardio": 15003, "cardio</w>": 23455, "cardiology</w>": 37276, "cardiovascular</w>": 29291, "cardo</w>": 40625, "cards</w>": 4094, "care": 2050, "care</w>": 1776, "cared</w>": 27675, "career": 20609, "career</w>": 3061, "careers</w>": 10090, "careful</w>": 11999, "carefully</w>": 15789, "caregi": 22042, "caregiver</w>": 46372, "caregivers</w>": 35909, "careless</w>": 47325, "carers</w>": 26484, "cares</w>": 10968, "caretaker</w>": 48037, "carey</w>": 14895, "cargo</w>": 12490, "cari": 18497, "cari</w>": 37273, "carib": 9757, "caribbean</w>": 10368, "caribou</w>": 42135, "caric": 25337, "caricature</w>": 38857, "carina</w>": 44357, "caring</w>": 13083, "carl": 8273, "carl</w>": 9482, "carla</w>": 25552, "carleton</w>": 46496, "carlin</w>": 47559, "carlisle</w>": 23276, "carlo": 17861, "carlo</w>": 15266, "carlos</w>": 9538, "carlow</w>": 44745, "carls": 39635, "carlson</w>": 24114, "carlton</w>": 18934, "carly": 23166, "carly</w>": 22689, "carlyle</w>": 46555, "carmel": 30757, "carmel</w>": 25601, "carmen": 41427, "carmen</w>": 18834, "carmichael</w>": 41657, "carn": 21597, "carnage</w>": 31385, "carnation</w>": 44577, "carnaval</w>": 47238, "carne": 17053, "carne</w>": 42885, "carnegie</w>": 25287, "carney</w>": 34194, "carni": 8438, "carnival": 36708, "carnival</w>": 10577, "caro": 30317, "caro</w>": 29344, "carol": 4242, "carol</w>": 11489, "carole</w>": 31955, "carolin": 26418, "carolina</w>": 7027, "caroline": 31064, "caroline</w>": 12641, "carols</w>": 33269, "carolyn</w>": 25825, "carou": 32224, "carousel</w>": 36665, "carp</w>": 26085, "carpen": 15584, "carpenter</w>": 18475, "carpet</w>": 6922, "carpets</w>": 34612, "carr": 26951, "carr</w>": 17136, "carra": 32332, "carre": 31114, "carrera</w>": 32952, "carri": 4739, "carriage": 47885, "carriage</w>": 21087, "carrick</w>": 44052, "carrie": 30334, "carrie</w>": 15848, "carried</w>": 12960, "carrier</w>": 12308, "carriers</w>": 26865, "carries</w>": 17982, "carrieunderwood</w>": 47338, "carrington</w>": 48759, "carroll": 41911, "carroll</w>": 14893, "carrot</w>": 15435, "carrots</w>": 19299, "carry": 31863, "carry</w>": 6998, "carrying</w>": 
9920, "cars</w>": 3346, "carsforsale</w>": 45222, "carson": 41766, "carson</w>": 13171, "cart": 27705, "cart</w>": 13065, "cartag": 45042, "cartagena</w>": 47157, "carte</w>": 44949, "cartel</w>": 30529, "carter": 27330, "carter</w>": 7260, "cartier</w>": 32951, "carto": 5487, "carton</w>": 41812, "cartoon": 33082, "cartoon</w>": 7651, "cartoonist</w>": 30793, "cartoons</w>": 17673, "cartri": 47084, "cartridge</w>": 29432, "cartridges</w>": 49249, "carts</w>": 27581, "cartunesapp</w>": 32888, "caruso</w>": 45192, "carve</w>": 40152, "carved</w>": 15127, "carver</w>": 28850, "carving</w>": 19428, "carvings</w>": 48123, "cary</w>": 22844, "cas": 1671, "cas</w>": 13831, "casa</w>": 14643, "casablanc": 36572, "casablanca</w>": 41950, "casc": 36714, "casca": 43296, "cascade</w>": 29065, "cascades</w>": 46454, "case": 17698, "case</w>": 2068, "cases</w>": 6888, "casey": 24899, "casey</w>": 12836, "cash": 11050, "cash</w>": 5131, "cashback</w>": 36368, "cashe": 32233, "cashew</w>": 39531, "cashi": 29517, "cashier</w>": 34547, "cashmere</w>": 34566, "casi": 38350, "casino</w>": 10473, "casio</w>": 32261, "cask</w>": 26299, "casm</w>": 35198, "casper</w>": 35892, "cass</w>": 22556, "cassandra</w>": 35289, "casser": 31093, "casserole</w>": 36045, "cassette</w>": 19717, "cassi": 14942, "cassidy</w>": 21757, "cassie</w>": 29323, "cassini</w>": 46554, "cast": 2509, "cast</w>": 1970, "caste</w>": 32693, "casted</w>": 33838, "castel": 43306, "castell": 31792, "caster": 32101, "caster</w>": 8449, "casters</w>": 29721, "castic</w>": 47737, "castillo</w>": 30813, "casting</w>": 7087, "castle": 12496, "castle</w>": 3540, "castles</w>": 24766, "castro</w>": 16950, "casts</w>": 10595, "casu": 15345, "casual</w>": 10129, "casually</w>": 18840, "casualties</w>": 30244, "casualty</w>": 31222, "cat": 1481, "cat</w>": 2368, "cata</w>": 42279, "catal": 12792, "catalan</w>": 30532, "catalina</w>": 36576, "catalo": 34740, "catalog</w>": 20036, "catalogue</w>": 20985, "catalonia</w>": 27039, "catalunya</w>": 44132, "cataly": 15894, "catalyst</w>": 25387, "catan</w>": 45893, "catap": 39514, "catar": 35801, "catastro": 22736, "catastrophe</w>": 41422, "catastrophic</w>": 34448, "catch": 18901, "catch</w>": 3042, "catcher</w>": 15965, "catchers</w>": 39060, "catches</w>": 17213, "catching</w>": 8617, "catchy</w>": 37114, "catday</w>": 32243, "cate": 6357, "cate</w>": 24510, "cated</w>": 31823, "categor": 17006, "categori": 40117, "categories</w>": 19971, "category</w>": 9432, "cater": 16634, "cater</w>": 38101, "catering</w>": 16697, "caterpillar</w>": 27111, "catfish</w>": 26077, "cath": 9196, "cath</w>": 30811, "cathar": 43784, "cathe": 7174, "cathedr": 46370, "cathedral</w>": 7865, "catherine": 35035, "catherine</w>": 12339, "catho": 7595, "cathol": 16315, "catholic": 20382, "catholic</w>": 7757, "catholics</w>": 36808, "cathy": 40326, "cathy</w>": 22731, "cation</w>": 21367, "cato</w>": 33558, "cats": 38800, "cats</w>": 3989, "catsofinstagram</w>": 39901, "catsoftwitter</w>": 17273, "catt": 37339, "cattle": 48799, "cattle</w>": 13644, "caturday</w>": 20892, "catwalk</w>": 36565, "catwoman</w>": 47251, "cau": 1121, "cau</w>": 45529, "caucus</w>": 18847, "caught</w>": 4520, "caul": 23460, "cauley</w>": 41682, "caulfield</w>": 44906, "cauli": 20123, "cauliflower</w>": 23802, "cause": 18982, "cause</w>": 1394, "caused</w>": 8940, "causes</w>": 9775, "causeway</w>": 35034, "causing</w>": 10779, "caution</w>": 15656, "cautious</w>": 36579, "cav": 4942, "cav</w>": 45935, "cava</w>": 48682, "caval": 24537, "cavali": 20783, 
"cavalier</w>": 44488, "cavaliers</w>": 30194, "cavalry</w>": 32467, "cave": 25441, "cave</w>": 9654, "cavendish</w>": 42945, "caver": 41487, "caves</w>": 22096, "cavi": 27360, "caviar</w>": 31228, "cavill</w>": 40492, "cavity</w>": 43156, "cavs</w>": 16800, "caw": 38405, "caw</w>": 43804, "cawx</w>": 26739, "cay": 11876, "cay</w>": 37399, "cayenne</w>": 43650, "cayman</w>": 33737, "caz": 48451, "cb": 4034, "cb</w>": 8830, "cba</w>": 38472, "cbb</w>": 31487, "cbc": 14096, "cbc</w>": 14523, "cbd</w>": 13176, "cbe</w>": 43639, "cbi</w>": 30875, "cbj</w>": 35608, "cbn</w>": 26579, "cbp</w>": 46723, "cbr</w>": 28762, "cbs": 16788, "cbs</w>": 8009, "cc": 2976, "cc</w>": 2021, "cca</w>": 17987, "ccc</w>": 21856, "ccd</w>": 48556, "ccg</w>": 37755, "cch": 21789, "cchini</w>": 28467, "cci": 32942, "cci</w>": 8196, "ccl</w>": 43773, "ccm</w>": 40435, "cco</w>": 28786, "ccot</w>": 24950, "ccp</w>": 43045, "ccs</w>": 30400, "cctv</w>": 23097, "ccu</w>": 49023, "cd": 4308, "cd</w>": 4480, "cda</w>": 45565, "cdc": 41098, "cdc</w>": 25779, "cdn": 8886, "cdn</w>": 26802, "cdnpoli</w>": 11645, "cdo</w>": 47187, "cdp</w>": 39624, "cds</w>": 20784, "cdt</w>": 18455, "ce": 685, "ce</w>": 629, "cea</w>": 28355, "cean": 34409, "cean</w>": 37295, "cease": 32856, "cease</w>": 25499, "ceasefire</w>": 38291, "cebu</w>": 20146, "cec": 29694, "cec</w>": 40029, "cecil": 26987, "cecil</w>": 27169, "cecilia</w>": 35440, "ced": 25634, "ced</w>": 2323, "cedar": 24167, "cedar</w>": 13799, "cedric</w>": 36608, "cee": 45966, "cee</w>": 15015, "cees</w>": 47914, "ceil": 27275, "ceiling</w>": 12374, "ceilings</w>": 33770, "cek</w>": 45544, "cel": 2269, "cel</w>": 7597, "cele": 1314, "celeb": 38862, "celeb</w>": 19393, "celebr": 1372, "celebrate": 31414, "celebrate</w>": 2694, "celebrated</w>": 9184, "celebrates</w>": 7564, "celebrating</w>": 3382, "celebration</w>": 4615, "celebrations</w>": 10825, "celebratory</w>": 34115, "celebrities</w>": 17071, "celebrity": 23981, "celebrity</w>": 7320, "celebs</w>": 19803, "celed</w>": 25741, "celer": 9621, "celery</w>": 30990, "celeste</w>": 29364, "celesti": 29497, "celestial</w>": 32669, "celi": 25567, "celia</w>": 44489, "celine</w>": 33644, "cell": 9316, "cell</w>": 5533, "cellar</w>": 24282, "cellars</w>": 44976, "cellence</w>": 34687, "cello</w>": 23013, "cellphone</w>": 39029, "cells</w>": 8890, "cellu": 16791, "cellular</w>": 23268, "cels</w>": 24021, "celsius</w>": 47057, "celtic": 21897, "celtic</w>": 10523, "celticfc</w>": 38612, "celtics</w>": 16226, "cem</w>": 41435, "ceme": 10517, "cement</w>": 4369, "cements</w>": 19448, "cemetery</w>": 11660, "cen": 1306, "cen</w>": 30106, "cena</w>": 21591, "cence</w>": 24410, "cency</w>": 41259, "cene</w>": 30038, "censor": 24230, "censor</w>": 44709, "censored</w>": 30951, "censorship</w>": 27284, "census</w>": 23677, "cent": 1784, "cent</w>": 3662, "centenary</w>": 22422, "centennial</w>": 20895, "center": 16651, "center</w>": 2119, "centered</w>": 24584, "centers</w>": 14494, "centi": 48889, "centime": 48687, "centr": 2370, "central": 13448, "central</w>": 3339, "centre": 26310, "centre</w>": 2916, "centred</w>": 47925, "centres</w>": 19354, "centri": 30872, "centric</w>": 19297, "centro</w>": 37178, "cents</w>": 11934, "centu": 16818, "centuri": 36816, "centuries</w>": 19014, "century": 26134, "century</w>": 4275, "ceo": 46340, "ceo</w>": 3559, "ceos</w>": 28332, "cep": 2632, "cep</w>": 48714, "ceph": 44343, "cept</w>": 3678, "ception</w>": 12346, "cer": 1364, "cer</w>": 1925, "cera</w>": 34608, "ceram": 10677, "ceramic</w>": 
15112, "ceramics</w>": 22438, "cere": 3984, "cere</w>": 22085, "cereal</w>": 17581, "cereals</w>": 48618, "cerebral</w>": 39073, "ceremon": 15796, "ceremonial</w>": 33281, "ceremonies</w>": 21547, "ceremony</w>": 5193, "cern</w>": 44851, "cers</w>": 13638, "cert</w>": 27522, "certain": 8526, "certain</w>": 7883, "certainly</w>": 10883, "certainty</w>": 20054, "certi": 4888, "certific": 9443, "certificate</w>": 11786, "certificates</w>": 25281, "certification</w>": 14735, "certified</w>": 9288, "cerv": 25738, "cervical</w>": 35953, "ces": 28715, "ces</w>": 1604, "cesar": 37025, "cesar</w>": 28603, "cess": 2314, "cess</w>": 1554, "cessna</w>": 36596, "cest</w>": 27245, "cester": 15769, "cester</w>": 12718, "cet</w>": 14960, "cett</w>": 46708, "ceu</w>": 37457, "cevic": 48369, "cey</w>": 20971, "cf": 10189, "cf</w>": 11171, "cfa</w>": 34521, "cfb</w>": 32931, "cfc</w>": 11577, "cfd</w>": 46171, "cfl": 46320, "cfl</w>": 22332, "cfo</w>": 26937, "cfp</w>": 40756, "cfr</w>": 44033, "cfs</w>": 32835, "cg": 27118, "cg</w>": 14740, "cgc</w>": 38775, "cgi</w>": 30520, "ch": 540, "ch</w>": 634, "cha": 1587, "cha</w>": 4541, "chab": 26670, "chad": 13095, "chad</w>": 12923, "chae": 9460, "chaf": 38123, "chag": 27989, "chai": 31590, "chai</w>": 18919, "chain": 13898, "chain</w>": 3946, "chained</w>": 34402, "chains</w>": 14438, "chainsaw</w>": 37617, "chainz</w>": 39687, "chair": 4728, "chair</w>": 4269, "chaired</w>": 31664, "chairing</w>": 42205, "chairman</w>": 6901, "chairperson</w>": 31584, "chairs</w>": 12033, "chak": 13702, "chak</w>": 41713, "chakra": 38304, "chakra</w>": 33241, "chal": 7397, "chal</w>": 30809, "chale</w>": 38099, "chalet</w>": 37907, "chalk": 31362, "chalk</w>": 17846, "chall": 2073, "challeng": 4138, "challenge": 29462, "challenge</w>": 2836, "challenged</w>": 17380, "challenger</w>": 18228, "challengers</w>": 46404, "challenges</w>": 6280, "challenging</w>": 11754, "chalmers</w>": 47955, "cham": 1290, "cham</w>": 19951, "chamber": 18983, "chamber</w>": 7642, "chamberlain</w>": 32756, "chambers</w>": 16501, "chamele": 34759, "chameleon</w>": 41317, "champ": 36813, "champ</w>": 6602, "champag": 10283, "champagne</w>": 11007, "champi": 1680, "champion": 2643, "champion</w>": 3950, "champions</w>": 4227, "championship</w>": 3429, "championships</w>": 7047, "championsleague</w>": 27638, "champs</w>": 6240, "chan": 1255, "chan</w>": 6704, "chana</w>": 48752, "chanc": 13931, "chance": 32940, "chance</w>": 2594, "chancellor</w>": 15886, "chances</w>": 10870, "chand": 7126, "chand</w>": 41508, "chandelier</w>": 30570, "chandi": 12482, "chandigarh</w>": 34106, "chandler</w>": 17595, "chandra": 27082, "chandra</w>": 25348, "chanel</w>": 16951, "chang": 2233, "chang</w>": 16461, "change": 11608, "change</w>": 1799, "changeable</w>": 41335, "changed</w>": 4907, "changer</w>": 18406, "changers</w>": 35185, "changes</w>": 4938, "changing": 40384, "changing</w>": 5621, "changmin</w>": 47410, "chann": 8804, "channel": 25837, "channel</w>": 3847, "channeling</w>": 28197, "channels</w>": 13961, "channing</w>": 37417, "chant": 18165, "chant</w>": 13521, "chanting</w>": 32111, "chants</w>": 22723, "chanyeol</w>": 18805, "chao</w>": 31815, "chaos</w>": 10853, "chaotic</w>": 33501, "chap": 3825, "chap</w>": 21939, "chapel": 40859, "chapel</w>": 10137, "chaplain</w>": 38348, "chaplin</w>": 32545, "chapman</w>": 17968, "chapp": 20634, "chaps</w>": 36823, "chapter</w>": 6014, "chapters</w>": 22936, "char": 1054, "char</w>": 16017, "chara</w>": 35668, "charac": 2792, "character": 10997, 
"character</w>": 4009, "characterdesign</w>": 38149, "characteri": 20920, "characteristic</w>": 44747, "characteristics</w>": 26037, "characters</w>": 6564, "charan</w>": 31851, "charcoal</w>": 19268, "chard</w>": 17524, "chardon": 26599, "chardonnay</w>": 28161, "charge": 25032, "charge</w>": 5948, "chargeable</w>": 35664, "charged</w>": 7916, "charger</w>": 13090, "chargers</w>": 17352, "charges</w>": 8962, "charging</w>": 12514, "chariot</w>": 38811, "charis": 24449, "charisma</w>": 45041, "charismatic</w>": 37205, "charitable</w>": 23256, "charities</w>": 18493, "charity": 20008, "charity</w>": 4607, "charitytuesday</w>": 42794, "charl": 47736, "charle": 10217, "charles": 27983, "charles</w>": 5127, "charleston</w>": 15478, "charley</w>": 38027, "charli": 21784, "charli</w>": 49392, "charlie": 16764, "charlie</w>": 6393, "charlotte": 18445, "charlotte</w>": 7871, "charlottesville</w>": 32027, "charlton</w>": 27048, "charm</w>": 10876, "charmed</w>": 39790, "charming</w>": 12177, "charms</w>": 21944, "charred</w>": 44085, "chart": 42685, "chart</w>": 5053, "charted</w>": 27939, "charter": 42345, "charter</w>": 13569, "chartered</w>": 31298, "charters</w>": 46626, "charting</w>": 39841, "charts</w>": 10728, "chas": 10717, "chas</w>": 29838, "chase": 21503, "chase</w>": 3859, "chased</w>": 30342, "chaser</w>": 29560, "chasers</w>": 34158, "chases</w>": 45011, "chasing": 46909, "chasing</w>": 13376, "chassis</w>": 29188, "chast": 42176, "chasu": 41352, "chat": 5355, "chat</w>": 2402, "chatbots</w>": 43994, "chate": 30377, "chateau": 44582, "chateau</w>": 23520, "chath": 46849, "chatham</w>": 32030, "chats</w>": 13263, "chatt": 21618, "chattanoo": 28009, "chattanooga</w>": 29866, "chatted</w>": 34124, "chatter": 33473, "chatter</w>": 41103, "chatting</w>": 12401, "chatur": 33839, "chau": 11263, "chau</w>": 37536, "chauffe": 45440, "chauhan</w>": 46663, "chav": 28997, "chavez</w>": 27480, "chaw": 39639, "chay": 45317, "chaz": 47815, "chc</w>": 36233, "chd</w>": 41645, "che": 983, "che</w>": 3842, "chea</w>": 39580, "chead": 48358, "cheap": 27036, "cheap</w>": 8678, "cheape": 26164, "cheaper</w>": 17776, "cheapest</w>": 26640, "cheat</w>": 18180, "cheated</w>": 34285, "cheating</w>": 19722, "chec": 1113, "check": 7672, "check</w>": 1217, "checked</w>": 10387, "checker</w>": 45883, "checkers</w>": 48181, "checking</w>": 7441, "checklist</w>": 26989, "checkout</w>": 13101, "checkpoint</w>": 27531, "checks</w>": 13737, "ched": 11341, "ched</w>": 2146, "cheddar</w>": 20551, "chee": 5326, "chee</w>": 20944, "cheek": 40000, "cheek</w>": 21227, "cheeks</w>": 23019, "cheeky</w>": 15068, "cheer": 9733, "cheer</w>": 6918, "cheered</w>": 38111, "cheerful</w>": 28882, "cheering</w>": 14289, "cheerleader</w>": 29072, "cheerleaders</w>": 22343, "cheerleading</w>": 36366, "cheers</w>": 6562, "chees": 15182, "cheese": 10738, "cheese</w>": 4108, "cheeseburger</w>": 41200, "cheesecake</w>": 17803, "cheeses</w>": 36076, "cheesy</w>": 22093, "cheetah</w>": 27431, "chef": 12137, "chef</w>": 4895, "chefs</w>": 14486, "chek</w>": 43745, "chel": 3084, "chel</w>": 25970, "chell</w>": 46854, "chelle</w>": 30141, "chelms": 34936, "chelmsford</w>": 39890, "chelse": 19071, "chelsea</w>": 6031, "chelseafc</w>": 25927, "chelten": 18889, "cheltenham</w>": 21589, "chem": 5667, "chem</w>": 13698, "chemi": 7179, "chemical": 39376, "chemical</w>": 9208, "chemicals</w>": 17426, "chemist</w>": 23138, "chemistry</w>": 8841, "chemo": 33095, "chemo</w>": 36348, "chemotherapy</w>": 41412, "chemtrails</w>": 46015, "chen": 5907, 
"chen</w>": 8983, "cheney</w>": 43522, "cheng": 32512, "cheng</w>": 30190, "chenko</w>": 29073, "chennai": 28948, "chennai</w>": 12791, "cheon</w>": 11498, "cheque</w>": 28168, "cher": 3597, "cher</w>": 3466, "cheri": 26471, "cherish</w>": 20053, "cherished</w>": 42325, "cherno": 35376, "chernobyl</w>": 40554, "chero": 19844, "cherokee</w>": 22860, "cherries</w>": 27248, "cherry": 21470, "cherry</w>": 7325, "chers</w>": 5789, "chery</w>": 38478, "cheryl": 37784, "cheryl</w>": 20600, "ches": 18346, "ches</w>": 1910, "chesa": 28349, "chesapeake</w>": 32909, "cheshire</w>": 17130, "chesney</w>": 48747, "chess": 27170, "chess</w>": 8397, "chest": 18217, "chest</w>": 10563, "chester": 10466, "chester</w>": 3343, "chesterfield</w>": 32975, "chestnut</w>": 21834, "chet</w>": 9663, "chett</w>": 24695, "chev": 7152, "chev</w>": 41145, "chevro": 12850, "chevrolet</w>": 13240, "chevron</w>": 33792, "chevy</w>": 16581, "chew": 32645, "chew</w>": 22642, "chewan</w>": 23689, "chewbacca</w>": 49355, "chewing</w>": 31486, "chewy</w>": 42940, "chey": 26968, "chey</w>": 31208, "cheyenne</w>": 34805, "chez": 49183, "chez</w>": 10556, "chf</w>": 33021, "chfield</w>": 41619, "chhat": 34127, "chhattisgarh</w>": 44246, "chi": 1337, "chi</w>": 4039, "chia</w>": 19147, "chiang</w>": 33764, "chibi</w>": 22306, "chic": 2627, "chic</w>": 9091, "chica</w>": 44190, "chicag": 16778, "chicago": 15038, "chicago</w>": 3530, "chicagof": 40638, "chicagofire</w>": 46576, "chicas</w>": 40664, "chichester</w>": 43823, "chick": 3170, "chick</w>": 11238, "chicken": 26322, "chicken</w>": 3717, "chickens</w>": 21658, "chickpea</w>": 48109, "chicks</w>": 17810, "chico</w>": 30379, "chie": 40046, "chie</w>": 12388, "chief": 16830, "chief</w>": 3455, "chiefs</w>": 11419, "chiev": 47761, "chiff": 27407, "chiffon</w>": 31817, "chig": 42952, "chihu": 22857, "chihuahu": 25437, "chihuahua</w>": 30181, "chik</w>": 45455, "chil": 1333, "child": 4392, "child</w>": 2913, "childcare</w>": 31133, "childhood": 34772, "childhood</w>": 7551, "childish</w>": 31939, "childre": 2135, "children": 11101, "children</w>": 2153, "childrens": 31551, "childrens</w>": 21553, "childs</w>": 39521, "chile</w>": 10022, "chilean</w>": 33186, "chili</w>": 13033, "chill": 6498, "chill</w>": 6382, "chilled</w>": 23540, "chillen</w>": 45160, "chilli": 26787, "chilli</w>": 17067, "chillin</w>": 10347, "chilling</w>": 10179, "chillout</w>": 39842, "chills</w>": 25460, "chilly</w>": 14450, "chim": 10543, "chimney</w>": 26821, "chimp</w>": 44374, "chin": 6555, "chin</w>": 8979, "china": 38943, "china</w>": 2817, "chinatown</w>": 28582, "chine": 4013, "chinese": 30568, "chinese</w>": 4271, "ching": 34621, "ching</w>": 1439, "chino": 47181, "chino</w>": 27440, "chinook</w>": 41577, "chinson</w>": 33786, "chio</w>": 19650, "chip": 19271, "chip</w>": 8730, "chipmun": 46384, "chipot": 17702, "chipotle</w>": 19284, "chipp": 39854, "chippe": 46541, "chipped</w>": 39892, "chipping</w>": 40323, "chips</w>": 8855, "chir": 15564, "chiro": 23413, "chiroprac": 25987, "chiropractic</w>": 34437, "chis": 19920, "chistan</w>": 20523, "chiswick</w>": 47290, "chit": 13515, "chit</w>": 45626, "chita</w>": 49184, "chitec": 39862, "chive</w>": 29222, "chives</w>": 34921, "chk</w>": 47424, "chl</w>": 38592, "chley</w>": 47748, "chlo": 10374, "chloe": 39966, "chloe</w>": 13992, "chlor": 23135, "chman</w>": 35835, "chment</w>": 20848, "chner</w>": 48277, "cho": 1327, "cho</w>": 5150, "choa</w>": 43077, "choc": 32772, "choc</w>": 21983, "choco": 46285, "choco</w>": 32692, "chocol": 3443, 
"chocolat</w>": 44631, "chocolate": 29389, "chocolate</w>": 3820, "chocolates</w>": 24120, "choi</w>": 23749, "choic": 35606, "choice": 23857, "choice</w>": 4051, "choices</w>": 11016, "choir</w>": 9214, "choirs</w>": 43277, "choke</w>": 30231, "choked</w>": 43521, "choker</w>": 39642, "choking</w>": 39993, "chol": 19802, "cholera</w>": 45999, "cholester": 26861, "cholesterol</w>": 27982, "chom": 25151, "chon": 20416, "chon</w>": 21601, "chondri": 37379, "chong</w>": 26220, "choo": 3869, "choo</w>": 24437, "chool</w>": 29578, "chools</w>": 41958, "choose": 22756, "choose</w>": 5073, "chooses</w>": 29923, "choosing</w>": 13475, "chop": 10458, "chop</w>": 16663, "chopin</w>": 42256, "chopped</w>": 22580, "chopper</w>": 24011, "chopping</w>": 35375, "chopra</w>": 24258, "chops</w>": 26321, "chor": 7567, "chor</w>": 47795, "choral</w>": 26684, "chord</w>": 33005, "chords</w>": 36152, "choreo": 17443, "choreographer</w>": 35952, "choreography</w>": 32749, "chores</w>": 40483, "chori": 25718, "chorizo</w>": 30802, "chorus</w>": 20869, "chos</w>": 26559, "chose</w>": 11090, "chosen</w>": 10044, "chou": 16960, "chou</w>": 42917, "choudhary</w>": 45503, "chow": 20257, "chow</w>": 21657, "chowder</w>": 37886, "chp</w>": 35896, "chr</w>": 36918, "chri": 1135, "chris": 9907, "chris</w>": 2978, "chrisbrown</w>": 41035, "chriss": 46745, "chrissy": 44762, "chrissy</w>": 40485, "christ": 1403, "christ</w>": 6703, "christchurch</w>": 27100, "christen": 31956, "christensen</w>": 42226, "christi": 3328, "christi</w>": 33213, "christian": 11792, "christian</w>": 4729, "christianity</w>": 20000, "christians</w>": 14842, "christie</w>": 16084, "christin": 30189, "christina</w>": 15925, "christine": 42610, "christine</w>": 14712, "christma": 12039, "christmas": 18174, "christmas</w>": 1677, "christmaseve</w>": 44381, "christmass": 44873, "christop": 7917, "christoph</w>": 47844, "christophe</w>": 45486, "christopher": 33349, "christopher</w>": 9630, "christy</w>": 28331, "chro": 13207, "chromatic</w>": 44207, "chrome": 24843, "chrome</w>": 9529, "chromo": 35809, "chron": 5577, "chron</w>": 39781, "chronic": 10115, "chronic</w>": 13677, "chronicle</w>": 20034, "chronicles</w>": 18905, "chrono": 29387, "chronograph</w>": 38397, "chry": 13508, "chrysler</w>": 20078, "chs": 40277, "chs</w>": 8391, "chsnews</w>": 44919, "cht</w>": 11384, "chter</w>": 47811, "chu": 3799, "chu</w>": 13622, "chubby</w>": 29109, "chuck": 13211, "chuck</w>": 9894, "chuckle</w>": 35733, "chucky</w>": 42026, "chuffed</w>": 27233, "chuk": 25878, "chuk</w>": 27221, "chul</w>": 33001, "chum": 46869, "chum</w>": 41767, "chun": 14693, "chun</w>": 25391, "chung</w>": 28418, "chunk</w>": 30275, "chunks</w>": 45538, "chunky</w>": 27978, "chups</w>": 46331, "chur": 2309, "church": 14956, "church</w>": 2735, "churches</w>": 15539, "churchill</w>": 17527, "chus</w>": 36246, "chut": 28788, "chutney</w>": 36261, "chy": 15131, "chy</w>": 8096, "chyna</w>": 43398, "châ": 48669, "ci": 698, "ci</w>": 5798, "cia</w>": 4019, "cial</w>": 1143, "cian</w>": 32323, "ciao</w>": 37677, "ciara</w>": 31369, "cible</w>": 28873, "cic": 14539, "cic</w>": 21517, "cid</w>": 27359, "cide</w>": 34178, "cider</w>": 13547, "cides</w>": 41326, "cie</w>": 19730, "cier": 24067, "cies</w>": 6785, "cif</w>": 35698, "cigar": 26031, "cigar</w>": 16525, "cigare": 13044, "cigarette</w>": 18548, "cigarettes</w>": 22750, "cigars</w>": 20750, "cii</w>": 42408, "cil": 9217, "cil</w>": 2998, "cilan": 33998, "cilantro</w>": 34568, "cili": 18977, "ciliation</w>": 25294, "cim</w>": 30021, 
"cin": 2396, "cin</w>": 25367, "cina</w>": 39467, "cincin": 13291, "cincinnati</w>": 14197, "cinco</w>": 25131, "cincode": 40930, "cincodemayo</w>": 42542, "cincy": 30015, "cincy</w>": 30286, "cinde": 20660, "cinderella</w>": 21515, "cindy": 34439, "cindy</w>": 18532, "cine": 4015, "cine</w>": 27451, "cinema": 38251, "cinema</w>": 6443, "cinemas</w>": 14845, "cinematic</w>": 25602, "cinemato": 21919, "cinematographer</w>": 39059, "cinematography</w>": 33802, "ciner": 39882, "cing</w>": 4014, "cini</w>": 25699, "cinnam": 12768, "cinnamon</w>": 13460, "cino</w>": 18616, "cio": 44584, "cio</w>": 9954, "cion</w>": 22024, "ciones</w>": 37155, "cious</w>": 38466, "cip": 32884, "cir": 2459, "cir</w>": 41135, "circa</w>": 10411, "circle": 33574, "circle</w>": 7117, "circles</w>": 19411, "circling</w>": 46036, "circu": 5143, "circuit": 35583, "circuit</w>": 9801, "circuits</w>": 33260, "circul": 16618, "circular</w>": 19733, "circulare": 39525, "circulareconomy</w>": 39878, "circulated</w>": 46258, "circulating</w>": 42980, "circulation</w>": 27880, "circum": 13406, "circumstances</w>": 18786, "circus</w>": 11833, "cirque</w>": 36049, "cis": 9459, "cis</w>": 23513, "cisco": 36689, "cisco</w>": 19290, "cise</w>": 19657, "cisely</w>": 33434, "cision</w>": 41957, "cism</w>": 24166, "cist</w>": 40906, "cit": 4420, "cit</w>": 31294, "citadel</w>": 38036, "citation</w>": 33581, "cite</w>": 32641, "cited</w>": 25069, "cites</w>": 34490, "citi": 4280, "citi</w>": 30270, "cities</w>": 5441, "citing</w>": 29088, "citiz": 5816, "citizen": 11720, "citizen</w>": 9814, "citizens</w>": 7949, "citizenship</w>": 17386, "cito</w>": 42636, "citro": 27941, "citroen</w>": 35805, "citrus</w>": 17379, "city": 5002, "city</w>": 1305, "cityfc</w>": 28751, "cityo": 25709, "cityof": 11595, "cityscape</w>": 40808, "ciu": 39693, "cius</w>": 42559, "civ": 40039, "civic": 32240, "civic</w>": 11888, "civil": 6923, "civil</w>": 6450, "civilian</w>": 21187, "civilians</w>": 18076, "civilization</w>": 22503, "civilwar</w>": 34524, "ción</w>": 44700, "cj": 15238, "cj</w>": 15205, "ck": 916, "ck</w>": 868, "cke": 25224, "cke</w>": 40989, "cked</w>": 3441, "cken": 25566, "cker": 15509, "cker</w>": 4744, "ckers</w>": 37073, "cket</w>": 5525, "ckett</w>": 33899, "ckey": 15029, "ckey</w>": 3657, "cki": 36916, "cki</w>": 41055, "cking</w>": 4805, "cko</w>": 28818, "cks</w>": 2031, "cky": 26229, "cky</w>": 3083, "cl": 969, "cl</w>": 6482, "cla": 940, "cla</w>": 20636, "clad</w>": 31606, "cladding</w>": 46411, "clai": 29459, "claim": 4290, "claim</w>": 6607, "claimed</w>": 9010, "claiming</w>": 15286, "claims</w>": 6852, "clair": 31441, "clair</w>": 14039, "claire": 20410, "claire</w>": 10460, "clam": 13588, "clam</w>": 32598, "clamation</w>": 21793, "clamp</w>": 41501, "clams</w>": 38849, "clan": 29252, "clan</w>": 14114, "clancy</w>": 37227, "clans</w>": 38279, "clap": 30037, "clap</w>": 25546, "clapham</w>": 43619, "clapton</w>": 37683, "clar": 3617, "clara</w>": 19468, "clare": 18948, "clare</w>": 15927, "claremont</w>": 47789, "clarence</w>": 29320, "clari": 15175, "clarify</w>": 37004, "clarinet</w>": 41178, "clarity</w>": 21323, "clark": 13340, "clark</w>": 7521, "clarke</w>": 11548, "clarkson</w>": 25706, "clas": 32003, "clash": 38367, "clash</w>": 9359, "clashes</w>": 25193, "clasico</w>": 43567, "class": 2876, "class</w>": 1874, "classes</w>": 6919, "classi": 2507, "classic": 9353, "classic</w>": 2713, "classical": 22179, "classical</w>": 11355, "classicalmusic</w>": 27806, "classiccar</w>": 46906, "classiccars</w>": 21064, 
"classics</w>": 10634, "classification</w>": 26612, "classified</w>": 22056, "classmate</w>": 37090, "classmates</w>": 30062, "classof</w>": 25345, "classroom</w>": 9001, "classrooms</w>": 25768, "classy</w>": 11615, "clau": 7526, "claude</w>": 17461, "claudi": 39439, "claudia</w>": 21893, "claudio</w>": 31230, "claus</w>": 23317, "clause</w>": 26151, "clave</w>": 24111, "claw": 49230, "claw</w>": 19106, "claws</w>": 29161, "clay": 10402, "clay</w>": 8823, "clays</w>": 26128, "clayton": 46445, "clayton</w>": 19413, "clc</w>": 31380, "cle": 1321, "cle</w>": 2537, "clean": 3572, "clean</w>": 3772, "cleaned</w>": 17468, "cleanenergy</w>": 43538, "cleaner</w>": 15619, "cleaners</w>": 33258, "cleaning</w>": 7210, "cleanliness</w>": 47886, "cleans</w>": 40827, "cleanse</w>": 28717, "cleanser</w>": 44170, "cleansing</w>": 25931, "cleanup</w>": 22353, "clear": 4631, "clear</w>": 3143, "clearance</w>": 17959, "cleared</w>": 14880, "clearer</w>": 37031, "clearing</w>": 15481, "clearly</w>": 7767, "clears</w>": 29092, "clearwater</w>": 32124, "cleary</w>": 44342, "cleats</w>": 33486, "cleavage</w>": 44165, "cled</w>": 12827, "clegg</w>": 42915, "clemens</w>": 45896, "clement": 22592, "clement</w>": 24714, "clemente</w>": 42461, "clementine</w>": 47112, "clements</w>": 49175, "clemson": 38170, "clemson</w>": 19537, "clen": 35547, "cleo</w>": 40344, "cleop": 36287, "cleopatra</w>": 41212, "cler": 11828, "clergy</w>": 42635, "cleric</w>": 43748, "clerk</w>": 22230, "clermont</w>": 47529, "cles</w>": 8077, "cleve": 37599, "clevel": 7701, "cleveland": 30716, "cleveland</w>": 8430, "clever": 30977, "clever</w>": 13385, "clg</w>": 47546, "cli": 1503, "clich": 44407, "click": 16676, "click</w>": 3585, "clicked</w>": 29015, "clicking</w>": 26542, "clicks</w>": 31250, "client": 48528, "client</w>": 7467, "clients</w>": 8114, "clif": 13182, "cliff": 23827, "cliff</w>": 10625, "cliffe</w>": 15170, "clifford</w>": 24226, "cliffs</w>": 20953, "clifton</w>": 23878, "climat": 37283, "climate": 7854, "climate</w>": 4589, "climateaction</w>": 31622, "climatechange</w>": 11055, "climates</w>": 46022, "climax</w>": 37033, "climb": 7421, "climb</w>": 10649, "climbed</w>": 22528, "climber</w>": 36910, "climbers</w>": 47648, "climbing</w>": 9877, "climbs</w>": 29098, "clin": 2879, "clinch</w>": 30404, "clinched</w>": 44064, "cline</w>": 37460, "cling": 37068, "cling</w>": 4760, "clinic</w>": 7926, "clinical": 35133, "clinical</w>": 9148, "clinicians</w>": 45866, "clinics</w>": 23330, "clint": 37542, "clint</w>": 21160, "clinton": 34403, "clinton</w>": 5820, "clio</w>": 46889, "clip": 39712, "clip</w>": 9289, "clipped</w>": 45524, "clipper</w>": 42245, "clippers</w>": 23319, "clipping</w>": 47484, "clips</w>": 16594, "clique</w>": 34983, "clive": 36086, "clive</w>": 21509, "cll": 46091, "cllr": 45743, "cllr</w>": 23034, "clo": 1194, "cloak</w>": 36528, "clock": 19878, "clock</w>": 6716, "clocked</w>": 49049, "clocks</w>": 25895, "clockwise</w>": 46150, "clockwork</w>": 42297, "clon": 24477, "clone</w>": 22854, "clones</w>": 48047, "clooney</w>": 33161, "clos</w>": 48821, "close": 10603, "close</w>": 2660, "closed</w>": 4552, "closely</w>": 13478, "closer</w>": 6377, "closes</w>": 11354, "closest</w>": 14975, "closet</w>": 14221, "closeup</w>": 35439, "closing</w>": 7101, "closure</w>": 13249, "closures</w>": 22923, "cloth</w>": 14559, "clothes</w>": 7080, "clothing</w>": 7425, "clou": 4069, "cloud": 12965, "cloud</w>": 3887, "cloudcomputing</w>": 41390, "clouds</w>": 6244, "cloudy</w>": 13106, "clough</w>": 42909, "clover": 
39574, "clover</w>": 22812, "clow": 18386, "clown</w>": 15329, "clowns</w>": 30820, "cls</w>": 44251, "clt": 29651, "clt</w>": 24236, "clu": 996, "club": 9642, "club</w>": 1736, "clubbing</w>": 48128, "clubhouse</w>": 26553, "clubs</w>": 9437, "clue</w>": 14994, "clueless</w>": 35350, "clues</w>": 23764, "clusive</w>": 41362, "cluster</w>": 15595, "clusters</w>": 33217, "clut": 28507, "clutch</w>": 13953, "clutter</w>": 40804, "cly": 12037, "clyde": 39557, "clyde</w>": 18469, "cm": 10190, "cm</w>": 3741, "cma": 30554, "cma</w>": 31388, "cmc</w>": 45839, "cmdr</w>": 48250, "cme</w>": 34946, "cmo</w>": 24589, "cmon</w>": 42904, "cmp": 46355, "cms</w>": 22520, "cmt</w>": 42727, "cmu</w>": 43046, "cn": 3886, "cn</w>": 16200, "cna</w>": 48287, "cnbc": 41242, "cnbc</w>": 24371, "cnblue</w>": 36018, "cnc</w>": 20571, "cnet</w>": 47487, "cnews</w>": 24319, "cng</w>": 41496, "cnn": 22405, "cnn</w>": 8259, "cns</w>": 46095, "cny</w>": 31614, "co": 622, "co</w>": 1320, "coa</w>": 29167, "coach": 3275, "coach</w>": 2312, "coached</w>": 30228, "coachella</w>": 20222, "coaches</w>": 6924, "coaching</w>": 7766, "coal": 10227, "coal</w>": 7919, "coalition</w>": 12920, "coast": 6398, "coast</w>": 3720, "coastal": 38246, "coastal</w>": 10852, "coaster</w>": 15944, "coasters</w>": 31548, "coastguard</w>": 40601, "coastline</w>": 27959, "coasts</w>": 42225, "coat": 28869, "coat</w>": 7356, "coated</w>": 23401, "coates</w>": 36899, "coating</w>": 25369, "coatings</w>": 48706, "coats</w>": 18075, "cob": 20140, "cob</w>": 32863, "cobain</w>": 36866, "cobalt</w>": 30896, "cobb</w>": 22719, "cobble": 47894, "cobra</w>": 21574, "coc": 23036, "coc</w>": 39498, "coca</w>": 21197, "cocac": 26393, "cocacola</w>": 31248, "cocaine</w>": 20534, "coch": 18599, "cochran</w>": 48798, "cochrane</w>": 41752, "coco": 11850, "coco</w>": 13316, "cocoa</w>": 18074, "cocon": 8597, "coconut</w>": 9581, "cod": 16132, "cod</w>": 11915, "code": 11582, "code</w>": 3217, "coded</w>": 33703, "coden": 43914, "coder</w>": 41561, "codes</w>": 14566, "codi": 39711, "coding</w>": 12647, "cody": 23222, "cody</w>": 12666, "coe</w>": 15386, "coed</w>": 41028, "coel": 45633, "coer": 41198, "coeur</w>": 44986, "coffe": 2255, "coffee": 12898, "coffee</w>": 2453, "coffees</w>": 41184, "coffey</w>": 48066, "cofficial</w>": 18757, "coffin</w>": 29907, "cog": 26362, "cog</w>": 35960, "cogn": 12210, "cognac</w>": 44361, "cognition</w>": 46825, "cognitive</w>": 16584, "cohe": 20669, "cohen</w>": 13381, "coherent</w>": 48450, "cohort</w>": 22782, "coil</w>": 25307, "coim": 41528, "coin": 14651, "coin</w>": 4170, "coinci": 14015, "coincidence</w>": 19807, "coins</w>": 10530, "coke": 39602, "coke</w>": 14035, "col": 754, "col</w>": 9371, "cola</w>": 15444, "colbert</w>": 31647, "colby</w>": 32068, "colchester</w>": 31715, "cold": 11146, "cold</w>": 3153, "colder</w>": 23859, "coldest</w>": 31438, "coldplay</w>": 27770, "cole": 9305, "cole</w>": 8166, "coleman</w>": 15774, "coles": 40265, "coles</w>": 30398, "coli": 18877, "coli</w>": 15910, "colin": 20989, "colin</w>": 10238, "coliseum</w>": 21836, "coll": 25982, "coll</w>": 23898, "colla": 2929, "collab</w>": 14013, "collabor": 4437, "collaborate</w>": 21271, "collaborated</w>": 42265, "collaborating</w>": 25545, "collaboration</w>": 6642, "collaborations</w>": 36520, "collaborative</w>": 15841, "collaborator</w>": 48186, "collaborators</w>": 45901, "collage</w>": 11258, "collagen</w>": 36120, "collap": 16881, "collapse</w>": 16520, "collapsed</w>": 25037, "collapses</w>": 43601, "collar": 39662, 
"collar</w>": 13497, "collateral</w>": 44512, "colle": 1801, "colleague</w>": 13067, "colleagues</w>": 8203, "collec": 1733, "collect</w>": 10186, "collected</w>": 11980, "collecti": 18530, "collectible</w>": 25680, "collectibles</w>": 21519, "collecting</w>": 10325, "collection</w>": 2548, "collections</w>": 12760, "collective</w>": 10162, "collectively</w>": 40687, "collector</w>": 13522, "collectors</w>": 20540, "collects</w>": 31576, "colleen</w>": 31020, "college": 13512, "college</w>": 2229, "colleges</w>": 17357, "collegi": 16311, "collegiate</w>": 18068, "colli": 8262, "collide</w>": 27214, "collie</w>": 30611, "collier</w>": 35748, "collin": 24056, "collin</w>": 32116, "colling": 32319, "collingwood</w>": 45873, "collins</w>": 8684, "collision</w>": 15407, "collo": 25115, "colloqui": 37243, "colloquium</w>": 46514, "collu": 25658, "collusion</w>": 33864, "colo": 7300, "colo</w>": 27288, "cologne</w>": 22216, "cology</w>": 19187, "colom": 8987, "colombia</w>": 12901, "colombian</w>": 28701, "colombo</w>": 33207, "colon": 8280, "colon</w>": 29050, "colonel</w>": 22674, "coloni": 22667, "colonial</w>": 16530, "colonialism</w>": 43385, "colonies</w>": 38738, "colony</w>": 18767, "color": 4036, "color</w>": 3140, "colorado": 34580, "colorado</w>": 6742, "colorec": 41171, "colored</w>": 11775, "colorful</w>": 11444, "colori": 28764, "coloring</w>": 17696, "colorized</w>": 46730, "colors</w>": 5389, "colorstv</w>": 28195, "colorway</w>": 44576, "colossal</w>": 40258, "colosse": 48142, "colossus</w>": 34022, "colour": 10240, "colour</w>": 4769, "coloured</w>": 17111, "colourful</w>": 15562, "colouring</w>": 31803, "colours</w>": 7626, "cols</w>": 35726, "colt</w>": 19726, "colton</w>": 32249, "coltrane</w>": 42333, "colts</w>": 16135, "colum": 4164, "columb": 31043, "columbi": 25947, "columbia</w>": 9410, "columbus</w>": 11273, "column</w>": 10593, "columnist</w>": 28958, "columns</w>": 29056, "com": 610, "com</w>": 2464, "coma</w>": 19620, "comb": 3587, "comb</w>": 16380, "combat": 35083, "combat</w>": 9275, "combating</w>": 46121, "combe</w>": 14363, "combin": 25112, "combination</w>": 11312, "combinations</w>": 34950, "combine</w>": 12919, "combined</w>": 10427, "combines</w>": 22991, "combining</w>": 23561, "combo</w>": 10155, "combos</w>": 48117, "combs</w>": 30694, "combu": 35629, "combustion</w>": 44654, "comcast</w>": 30043, "come": 4225, "come</w>": 891, "comeback</w>": 8234, "comedian</w>": 13848, "comedians</w>": 33758, "comedic</w>": 43360, "comedy": 19346, "comedy</w>": 4749, "comer": 42997, "comer</w>": 20916, "comers</w>": 34436, "comes</w>": 2091, "comet</w>": 21405, "comets</w>": 40636, "comey</w>": 22957, "comfor": 6563, "comfort": 44000, "comfort</w>": 7808, "comfortable</w>": 8652, "comfortably</w>": 30392, "comforting</w>": 33835, "comforts</w>": 42243, "comfy</w>": 15736, "comi": 40781, "comic": 7729, "comic</w>": 4962, "comicart</w>": 46018, "comicbook</w>": 46564, "comicbooks</w>": 22018, "comiccon</w>": 18379, "comicon</w>": 43820, "comics</w>": 4256, "comin</w>": 18164, "coming": 14916, "coming</w>": 1171, "comingsoon</w>": 19894, "comm": 965, "comm</w>": 11413, "comman": 39780, "command": 18391, "command</w>": 11350, "commander</w>": 11265, "commanders</w>": 41667, "commanding</w>": 36933, "commandments</w>": 43409, "commando</w>": 31361, "commands</w>": 38163, "comme</w>": 29692, "commemor": 9495, "commemorate</w>": 21242, "commemorates</w>": 45149, "commemorating</w>": 28734, "commemoration</w>": 29288, "commemorative</w>": 24623, "commen": 15795, 
"commence</w>": 25059, "commenced</w>": 43908, "commencement</w>": 21666, "commences</w>": 48551, "commend": 37555, "commended</w>": 40702, "comment": 20035, "comment</w>": 5761, "commentary</w>": 14146, "commentator</w>": 32016, "commented</w>": 28328, "commenting</w>": 37292, "comments</w>": 6606, "commer": 4028, "commerce</w>": 8333, "commerci": 15601, "commercial": 31802, "commercial</w>": 6287, "commercials</w>": 30724, "commish</w>": 45399, "commissi": 6000, "commission</w>": 5292, "commissioned</w>": 16565, "commissioner</w>": 10221, "commissioners</w>": 30702, "commissioning</w>": 29585, "commissions</w>": 20668, "commit": 3041, "commit</w>": 11797, "commitment</w>": 7770, "commitments</w>": 32136, "commits</w>": 20241, "committed</w>": 7907, "committee</w>": 5636, "committees</w>": 40504, "committing</w>": 21937, "commod": 9496, "commodities</w>": 30350, "commodity</w>": 29041, "commodore</w>": 31129, "common": 8414, "common</w>": 4176, "commonly</w>": 20344, "commons</w>": 16653, "commonwealth</w>": 16569, "comms</w>": 18832, "commu": 9561, "commun": 1515, "communal</w>": 32809, "communi": 16164, "communic": 4784, "communicate</w>": 19809, "communication</w>": 7999, "communications</w>": 10052, "communion</w>": 28579, "communism</w>": 35387, "communist</w>": 18602, "communities</w>": 6361, "community": 14784, "community</w>": 1927, "commute</w>": 15898, "commuter</w>": 27782, "commuters</w>": 30823, "commuting</w>": 43503, "como</w>": 16236, "comp": 2561, "comp</w>": 11679, "compac": 40014, "compact</w>": 13690, "compan": 1995, "companies</w>": 5361, "companion</w>": 14963, "companions</w>": 37124, "company</w>": 2634, "compar": 7580, "comparable</w>": 27092, "comparative</w>": 33388, "compare</w>": 13771, "compared</w>": 10544, "compares</w>": 25104, "comparing</w>": 20564, "comparison</w>": 14186, "comparisons</w>": 40870, "compart": 30072, "compartment</w>": 40383, "compass</w>": 19438, "compassion</w>": 14463, "compassionate</w>": 30193, "compati": 17295, "compatibility</w>": 41614, "compatible</w>": 21286, "compe": 5254, "compelled</w>": 49375, "compelling</w>": 21766, "compen": 42079, "compens": 15172, "compensation</w>": 18663, "compet": 2932, "compete</w>": 10038, "competed</w>": 27767, "competen": 31853, "competence</w>": 31165, "competency</w>": 49293, "competent</w>": 28113, "competes</w>": 39826, "competing</w>": 13068, "competit": 15892, "competiti": 32581, "competition</w>": 3742, "competitions</w>": 23259, "competitive</w>": 10687, "competitiveness</w>": 43209, "competitor</w>": 26633, "competitors</w>": 23638, "compilation</w>": 20446, "compiled</w>": 34579, "compla": 7428, "complain</w>": 19292, "complained</w>": 42029, "complaining</w>": 20812, "complains</w>": 46363, "complaint</w>": 20391, "complaints</w>": 20020, "comple": 1730, "complement</w>": 36624, "complementary</w>": 48953, "complete</w>": 3263, "completed</w>": 5976, "completely</w>": 5989, "completes</w>": 19321, "completing</w>": 14949, "completion</w>": 15915, "complex": 16099, "complex</w>": 6324, "complexes</w>": 47870, "complexion</w>": 47732, "complexity</w>": 24815, "compli": 5270, "compliance</w>": 14658, "compliant</w>": 29893, "complic": 11460, "complicated</w>": 16621, "complications</w>": 29936, "compliment</w>": 25116, "complimentary</w>": 20948, "compliments</w>": 25477, "comply</w>": 36281, "component</w>": 21284, "components</w>": 16816, "compos": 7783, "compose</w>": 43659, "composed</w>": 19916, "composer</w>": 12104, "composers</w>": 33314, "composing</w>": 40412, "composite</w>": 
21606, "composites</w>": 45395, "composition</w>": 17510, "compositions</w>": 44652, "compost": 46002, "compost</w>": 33307, "compound</w>": 19980, "compounds</w>": 33991, "compre": 8483, "compreh": 42976, "comprehen": 12050, "comprehend</w>": 48230, "comprehensive</w>": 13854, "compress": 33353, "compressed</w>": 42359, "compression</w>": 25638, "compressor</w>": 39607, "compri": 29445, "compromise</w>": 26611, "compromised</w>": 38576, "compromising</w>": 45436, "comps</w>": 48665, "compton</w>": 28364, "compu": 11639, "compul": 25869, "compulsory</w>": 39345, "computing</w>": 12732, "comra": 25553, "comrade</w>": 30844, "comrades</w>": 29282, "coms</w>": 30493, "con": 616, "con</w>": 2457, "cona</w>": 30605, "conan</w>": 24750, "conce": 9145, "concealed</w>": 35419, "conceded</w>": 37895, "conceived</w>": 39725, "concentr": 11085, "concentrate</w>": 30846, "concentrated</w>": 36776, "concentration</w>": 18565, "concep": 8389, "concepcion</w>": 47035, "concept</w>": 6353, "conceptart</w>": 31162, "conception</w>": 30510, "conceptions</w>": 40307, "concepts</w>": 16763, "conceptu": 42745, "conceptual</w>": 34070, "concer": 2228, "concern</w>": 12928, "concerned</w>": 12020, "concerning</w>": 21772, "concerns</w>": 11134, "concert": 32180, "concert</w>": 3066, "concerto</w>": 24710, "concerts</w>": 14418, "concession</w>": 38117, "concessions</w>": 43981, "concier": 28859, "concierge</w>": 39850, "conclave</w>": 38098, "conclu": 9627, "conclude</w>": 37525, "concluded</w>": 27825, "concludes</w>": 30634, "conclusion</w>": 20932, "conclusions</w>": 39507, "conco": 43034, "concor": 19913, "concord</w>": 26448, "concordia</w>": 35492, "concours</w>": 36282, "concourse</w>": 37793, "concre": 43658, "concrete</w>": 9637, "concussion</w>": 28321, "condem": 13287, "condemn</w>": 27212, "condemned</w>": 35145, "condemns</w>": 32092, "conden": 24816, "conditi": 11170, "condition": 36978, "condition</w>": 7336, "conditional</w>": 24671, "conditioned</w>": 37014, "conditioner</w>": 31239, "conditioning</w>": 18181, "conditions</w>": 5892, "condo</w>": 19952, "condol": 18661, "condolences</w>": 20836, "condom</w>": 39021, "condomin": 42589, "condoms</w>": 37878, "condor</w>": 47643, "condos</w>": 42342, "condu": 40772, "conduc": 5379, "conduct</w>": 11647, "conducted</w>": 13080, "conducting</w>": 16787, "conductor</w>": 22317, "conducts</w>": 32084, "cone": 39279, "cone</w>": 10266, "cones</w>": 26718, "coney</w>": 41837, "conf</w>": 6477, "confe": 1968, "confeder": 17104, "confederate</w>": 24864, "confederation</w>": 43484, "conferen": 37961, "conference</w>": 2230, "conferences</w>": 22811, "conferencing</w>": 47320, "confess</w>": 38860, "confession</w>": 22572, "confessions</w>": 29404, "confetti</w>": 37923, "confi": 5005, "confidence</w>": 8510, "confident</w>": 12365, "confidential</w>": 28712, "configu": 46746, "configur": 26950, "configuration</w>": 33378, "confin": 45316, "confined</w>": 40973, "confir": 3930, "confirm</w>": 12130, "confirmation</w>": 19645, "confirmed</w>": 6346, "confirming</w>": 38433, "confirms</w>": 11803, "confis": 36285, "confit</w>": 42241, "confl": 8173, "conflic": 19029, "conflict</w>": 10397, "conflicting</w>": 43894, "conflicts</w>": 28713, "confor": 40933, "confron": 20033, "confront</w>": 38382, "confrontation</w>": 41478, "confu": 6890, "confuse</w>": 37503, "confused</w>": 10946, "confusing</w>": 24683, "confusion</w>": 20493, "cong</w>": 24407, "conge": 20013, "congestion</w>": 24432, "congo</w>": 20334, "congr": 1227, "congrats</w>": 1887, "congratul": 
1750, "congratulate</w>": 16633, "congratulated</w>": 42004, "congratulates</w>": 24580, "congratulating</w>": 30967, "congratulation</w>": 24751, "congratulations</w>": 1864, "congre": 7947, "congreg": 40727, "congregation</w>": 32618, "congress": 12452, "congress</w>": 4599, "congressional</w>": 15239, "congressman</w>": 17145, "congresswoman</w>": 37317, "coni": 39031, "coni</w>": 36651, "conj": 41543, "conju": 33821, "conjunction</w>": 34226, "conley</w>": 44536, "conline</w>": 37593, "conn": 41836, "conn</w>": 20329, "conne": 8437, "connec": 29933, "connect</w>": 19969, "connected</w>": 27506, "connecting</w>": 41429, "connection</w>": 26840, "connections</w>": 37161, "connie</w>": 25739, "connoisse": 46012, "connol": 27739, "connolly</w>": 29537, "connor": 21984, "connor</w>": 10218, "conom": 2664, "conomy</w>": 22529, "conor": 29955, "conor</w>": 19478, "conqu": 13382, "conquer": 38585, "conquer</w>": 19821, "conquered</w>": 27099, "conquering</w>": 43778, "conquest</w>": 35367, "conrad</w>": 22073, "cons</w>": 10311, "consci": 9427, "conscience</w>": 27310, "conscious</w>": 14914, "consciously</w>": 46755, "consciousness</w>": 17894, "conse": 34887, "consecu": 12084, "consecutive</w>": 12413, "consen": 23110, "consensus</w>": 25071, "consent</w>": 21922, "consequ": 13003, "consequence</w>": 42262, "consequences</w>": 15682, "conserv": 4649, "conservancy</w>": 46729, "conservation": 37616, "conservation</w>": 8322, "conservative</w>": 11421, "conservatives</w>": 17631, "conservatory</w>": 32140, "conserve</w>": 34231, "consi": 2899, "consider": 12471, "consider</w>": 6734, "considerable</w>": 38256, "considerably</w>": 38510, "consideration</w>": 24310, "considerations</w>": 33700, "considered</w>": 9487, "considering</w>": 10761, "considers</w>": 24691, "consist": 10410, "consist</w>": 33735, "consisted</w>": 49354, "consistency</w>": 25683, "consistent</w>": 16439, "consistently</w>": 23799, "consisting</w>": 39241, "consists</w>": 23458, "consol": 27869, "consolation</w>": 38888, "console</w>": 13403, "consoles</w>": 33136, "consoli": 21586, "consolidation</w>": 41111, "consor": 27108, "consortium</w>": 29988, "conspir": 12680, "conspiracy</w>": 15236, "const": 3826, "constable</w>": 29179, "constan": 38718, "constance</w>": 40682, "constant": 32000, "constant</w>": 13111, "constantine</w>": 30640, "constantly</w>": 14336, "constell": 21913, "constellation</w>": 25991, "constitu": 6299, "constituency</w>": 22464, "constituents</w>": 32075, "constitution</w>": 12157, "constitutional</w>": 16091, "constra": 28973, "constraints</w>": 41910, "constru": 3983, "construc": 13321, "construct</w>": 24467, "constructed</w>": 16876, "constructing</w>": 33653, "construction": 48873, "construction</w>": 4585, "constructive</w>": 31810, "consu": 4689, "consul": 5295, "consul</w>": 33630, "consulate</w>": 34341, "consult": 9438, "consult</w>": 26727, "consultancy</w>": 31735, "consultant</w>": 14196, "consultants</w>": 27203, "consultation</w>": 15777, "consultations</w>": 43424, "consulting</w>": 15883, "consume</w>": 28919, "consumed</w>": 29653, "consumer": 34408, "consumer</w>": 10422, "consumers</w>": 14014, "consuming</w>": 30607, "consumption</w>": 14904, "cont": 2036, "cont</w>": 21425, "contact": 39367, "contact</w>": 3523, "contacted</w>": 37331, "contacts</w>": 22789, "contag": 29259, "contagious</w>": 33984, "contain": 9948, "contain</w>": 15187, "contained</w>": 23836, "container</w>": 14913, "containers</w>": 20448, "containing</w>": 20281, "contains</w>": 12844, "contamin": 
24662, "contaminated</w>": 35773, "contamination</w>": 31770, "conte": 15402, "conte</w>": 26882, "contempl": 21924, "contemplating</w>": 33854, "contempor": 14538, "contemporary": 16607, "contemporary</w>": 8859, "contemporaryart</w>": 20212, "contempt</w>": 39293, "conten": 42201, "contender</w>": 23573, "contenders</w>": 29711, "content": 15526, "content</w>": 4750, "contentmarketing</w>": 20429, "contents</w>": 14850, "contest": 23103, "contest</w>": 4576, "contestalert</w>": 27313, "contestant</w>": 25682, "contestants</w>": 28062, "contested</w>": 37845, "contests</w>": 32210, "contex": 42015, "context</w>": 13089, "conti": 46431, "conti</w>": 40842, "contin": 1918, "continent</w>": 19623, "continental</w>": 14089, "continents</w>": 38642, "conting": 27104, "contingent</w>": 36467, "continu": 4688, "continually</w>": 34086, "continuation</w>": 38964, "continue</w>": 3942, "continued</w>": 10150, "continues</w>": 4305, "continuing</w>": 11009, "continuity</w>": 34035, "continuous</w>": 17033, "continuously</w>": 29634, "continuum</w>": 44978, "contour</w>": 34733, "contr": 22871, "contra": 9880, "contra</w>": 38620, "contrac": 7581, "contracep": 35109, "contract</w>": 6120, "contracting</w>": 39091, "contractor</w>": 21429, "contractors</w>": 22427, "contracts</w>": 16563, "contradic": 27957, "contrary</w>": 32805, "contrast</w>": 18501, "contrasting</w>": 40758, "contribu": 4753, "contribute</w>": 14112, "contributed</w>": 19397, "contributes</w>": 34203, "contributing</w>": 21762, "contribution</w>": 11116, "contributions</w>": 14465, "contributor</w>": 24553, "contributors</w>": 32908, "contro": 2372, "control": 9963, "control</w>": 3366, "controlled</w>": 14140, "controller</w>": 12929, "controllers</w>": 30374, "controlling</w>": 26427, "controls</w>": 15746, "controversi": 13674, "controversial</w>": 14617, "controversy</w>": 18659, "conv</w>": 48382, "conve": 18421, "conven": 7283, "conveni": 33278, "convenience</w>": 17859, "convenient</w>": 18978, "conveniently</w>": 40844, "convention</w>": 6752, "conventional</w>": 20835, "conventions</w>": 41404, "conver": 6336, "convergence</w>": 35381, "convers": 4577, "conversation</w>": 5690, "conversations</w>": 12326, "converse</w>": 24149, "conversion</w>": 15111, "conversions</w>": 44137, "convert</w>": 20074, "converted</w>": 20808, "converter</w>": 34611, "convertible</w>": 19608, "converting</w>": 34674, "converts</w>": 42470, "convey</w>": 38342, "convic": 11150, "convicted</w>": 18668, "conviction</w>": 24967, "convictions</w>": 44366, "convin": 12889, "convince</w>": 20351, "convinced</w>": 17388, "convincing</w>": 27742, "convo</w>": 19372, "convocation</w>": 30674, "convos</w>": 44842, "convoy</w>": 30292, "conway</w>": 21410, "conwy</w>": 48971, "cony</w>": 14501, "coo": 1664, "coo</w>": 21691, "coogs</w>": 47624, "cook": 9726, "cook</w>": 5977, "cookbook</w>": 21086, "cooke</w>": 29979, "cooked</w>": 11452, "cooker</w>": 23806, "cookery</w>": 38779, "cookie</w>": 9367, "cookies</w>": 8320, "cookin</w>": 46610, "cooking": 39248, "cooking</w>": 6283, "cookout</w>": 39743, "cooks</w>": 24256, "cool": 5594, "cool</w>": 2077, "cooled</w>": 37170, "cooler</w>": 11078, "coolest</w>": 10566, "cooling</w>": 15291, "coom": 41726, "coon": 34260, "coon</w>": 16958, "coop": 39917, "coop</w>": 18910, "cooper": 7264, "cooper</w>": 8133, "cooperate</w>": 42936, "cooperation</w>": 11785, "cooperative</w>": 24517, "coops</w>": 48531, "coordin": 8187, "coordinate</w>": 38250, "coordinated</w>": 32540, "coordinating</w>": 40075, 
"coordination</w>": 25611, "coordinator</w>": 13967, "coors</w>": 36025, "cop": 3196, "cop</w>": 7070, "copa</w>": 22749, "copd</w>": 45876, "cope": 47635, "cope</w>": 12564, "copeland</w>": 37604, "copen": 15637, "copenhagen</w>": 17390, "coper": 41891, "copernic": 45519, "copied</w>": 36770, "copies</w>": 9851, "coping</w>": 30545, "copolitics</w>": 45846, "copp": 20937, "copped</w>": 42229, "copper": 24741, "copper</w>": 10333, "coppola</w>": 47427, "cops</w>": 10719, "copter</w>": 28049, "copy": 11376, "copy</w>": 4509, "copying</w>": 38925, "copyright</w>": 15778, "cor": 851, "cor</w>": 18559, "cora</w>": 34953, "coral": 31220, "coral</w>": 12054, "corbett</w>": 35699, "corbin</w>": 35578, "corbyn</w>": 14026, "cord": 40893, "cord</w>": 11181, "corden</w>": 41999, "cordi": 41681, "cordless</w>": 44412, "cords</w>": 22164, "core": 19622, "core</w>": 5000, "cores</w>": 37874, "corey": 31279, "corey</w>": 15288, "corgi</w>": 31320, "cori": 26508, "coriander</w>": 37491, "corin": 17716, "corinthians</w>": 34471, "cork": 18148, "cork</w>": 10376, "corn": 5202, "corn</w>": 5894, "cornelius</w>": 45865, "cornell": 38689, "cornell</w>": 20859, "corner": 18509, "corner</w>": 5253, "corners</w>": 19584, "cornerstone</w>": 36280, "cornish</w>": 23774, "cornwall": 37903, "cornwall</w>": 10777, "coron": 13210, "corona</w>": 25564, "coronado</w>": 43946, "coronary</w>": 45955, "coronation</w>": 25014, "coroner</w>": 47241, "corp": 29203, "corp</w>": 10918, "corpor": 4258, "corporal</w>": 42445, "corporate": 33877, "corporate</w>": 6838, "corporation</w>": 11282, "corporations</w>": 25482, "corps</w>": 11330, "corpse</w>": 29408, "corpus</w>": 31672, "correc": 5011, "correct</w>": 8340, "corrected</w>": 35628, "correction</w>": 20843, "correctional</w>": 38030, "corrections</w>": 37507, "correctly</w>": 15359, "correlation</w>": 29218, "correspon": 20203, "correspondent</w>": 29996, "corri": 12974, "corridor</w>": 20592, "corrie</w>": 23961, "corro": 24936, "corro</w>": 42033, "corrosion</w>": 39191, "corru": 6501, "corrup": 30429, "corrupt</w>": 15194, "corruption</w>": 9141, "corsa</w>": 47670, "corsair</w>": 42367, "corset</w>": 40408, "cortex</w>": 40109, "cortez</w>": 30461, "corvette</w>": 24367, "cory": 23221, "cory</w>": 18329, "cos": 5865, "cos</w>": 5700, "cosby</w>": 30324, "cosc": 45944, "coscino</w>": 47909, "cose</w>": 26495, "cosm": 37486, "cosme": 9628, "cosmetic</w>": 23918, "cosmetics</w>": 12896, "cosmic": 47398, "cosmic</w>": 18304, "cosmo": 12829, "cosmo</w>": 32072, "cosmopolitan</w>": 35518, "cosmos</w>": 22151, "cospla": 15149, "cosplay": 42401, "cosplay</w>": 6435, "cosplayer</w>": 30215, "cosplaying</w>": 46701, "cost": 11360, "cost</w>": 4713, "costa</w>": 10480, "costar": 28659, "costarica</w>": 31272, "costco</w>": 31045, "costello</w>": 30667, "costing</w>": 39193, "costly</w>": 30170, "costs</w>": 7628, "costu": 5786, "costume</w>": 7235, "costumes</w>": 15150, "cosy</w>": 22848, "cot": 4718, "cot</w>": 5871, "cote": 44234, "cote</w>": 20751, "cotland</w>": 32576, "cotsw": 23303, "cotswolds</w>": 35546, "cott": 8211, "cott</w>": 11349, "cottage</w>": 12155, "cottages</w>": 34405, "cotton": 22218, "cotton</w>": 7050, "cou": 1368, "couch</w>": 12724, "cougar": 35028, "cougar</w>": 27042, "cougars</w>": 20425, "cough": 35631, "cough</w>": 18498, "cougs</w>": 28482, "coul": 22483, "could": 44812, "could</w>": 1510, "couldn</w>": 4072, "couldnt</w>": 29042, "coulter</w>": 42291, "coun": 939, "counc": 12927, "council": 18187, "council</w>": 3620, "councill": 15732, 
"councillor</w>": 21179, "councillors</w>": 29695, "councilman</w>": 40833, "councils</w>": 29938, "counsel": 13780, "counsel</w>": 19814, "counseling</w>": 25000, "counsell": 47510, "counselling</w>": 40581, "counselor</w>": 26148, "counselors</w>": 38688, "count": 6073, "count</w>": 5887, "countdown": 39559, "countdown</w>": 7500, "counted</w>": 23149, "counter": 10134, "counter</w>": 7352, "counterfe": 33067, "counterfeit</w>": 44242, "counterpart</w>": 39216, "counterparts</w>": 42106, "counters</w>": 46170, "countess</w>": 46276, "counties</w>": 12338, "counting</w>": 9723, "countless</w>": 21819, "countries</w>": 5489, "country": 7896, "country</w>": 2157, "countryfile</w>": 47023, "countrymusic</w>": 30372, "countryside</w>": 16303, "counts</w>": 12264, "county": 18734, "county</w>": 2116, "coup": 9871, "coup</w>": 16479, "coupe</w>": 16773, "couple": 40136, "couple</w>": 3377, "coupled</w>": 37153, "couples</w>": 14752, "coupling</w>": 45595, "coupon</w>": 14019, "coupons</w>": 23945, "cour": 1391, "coura": 4436, "courage</w>": 9828, "courageous</w>": 25005, "courier</w>": 27217, "cours</w>": 21493, "course": 43225, "course</w>": 2613, "courses</w>": 9464, "court": 16837, "court</w>": 2908, "courte": 5088, "courtesy</w>": 5228, "courthouse</w>": 22205, "courtney": 33601, "courtney</w>": 15990, "courtroom</w>": 41071, "courts</w>": 13514, "courty": 20121, "courtyard</w>": 21900, "cous</w>": 48397, "cousin</w>": 7780, "cousins</w>": 14073, "cout": 29118, "coutinho</w>": 35530, "couture</w>": 14808, "cov": 19384, "cov</w>": 48385, "cove": 21700, "cove</w>": 14708, "coven": 12483, "covenant</w>": 29647, "coventry</w>": 18007, "cover": 13534, "cover</w>": 2202, "coverage</w>": 6810, "covered</w>": 5603, "covering</w>": 9462, "covers</w>": 7745, "covert</w>": 40134, "coveted</w>": 36119, "covington</w>": 43196, "cow": 5076, "cow</w>": 9706, "cowan</w>": 42699, "coward</w>": 33729, "cowards</w>": 48972, "cowboy": 25833, "cowboy</w>": 13657, "cowboys</w>": 11864, "cowboysnation</w>": 43082, "cowell</w>": 39015, "cowgirl</w>": 47090, "coworker</w>": 30727, "coworkers</w>": 30821, "coworking</w>": 36034, "cows</w>": 15204, "cowx</w>": 23831, "cox": 25784, "cox</w>": 11597, "coy": 12765, "coy</w>": 15742, "coyi</w>": 48407, "coyle</w>": 45348, "coyne</w>": 44729, "coyo": 16614, "coyote</w>": 26586, "coyotes</w>": 30423, "coys</w>": 19736, "coz": 39922, "coz</w>": 14282, "cozy</w>": 14873, "cp": 7905, "cp</w>": 9130, "cpa</w>": 30095, "cpac</w>": 45731, "cpc</w>": 26125, "cpd</w>": 23402, "cpec</w>": 48007, "cpfc</w>": 27553, "cpi</w>": 41795, "cpl</w>": 26852, "cpr</w>": 25134, "cps</w>": 27078, "cpt</w>": 32892, "cpu</w>": 27700, "cq": 48910, "cq</w>": 48417, "cr": 1075, "cr</w>": 3483, "cra": 1184, "cra</w>": 18362, "crab": 27382, "crab</w>": 11574, "crabs</w>": 30908, "crack": 11222, "crack</w>": 10334, "crackdown</w>": 29527, "cracked</w>": 19826, "cracker</w>": 16298, "crackers</w>": 26200, "cracking</w>": 13008, "cracks</w>": 21426, "cracy</w>": 24749, "cradle</w>": 29384, "crae</w>": 40438, "craf": 10873, "craft": 7717, "craft</w>": 3588, "craftbeer</w>": 12371, "crafted</w>": 12424, "crafthour</w>": 42324, "crafting</w>": 26886, "crafts": 33276, "crafts</w>": 13383, "craftsman</w>": 39528, "craftsmanship</w>": 36682, "crafty</w>": 32317, "craic</w>": 46962, "craig": 14042, "craig</w>": 8061, "craigslist</w>": 43865, "cram": 29809, "cramer</w>": 44592, "cramps</w>": 46106, "cran": 7761, "cranberries</w>": 49361, "cranberry</w>": 23824, "crane</w>": 14626, "cranes</w>": 26979, "crani": 
45674, "crank": 46246, "crank</w>": 32283, "cranston</w>": 44340, "crap</w>": 11899, "crappy</w>": 30475, "crash": 37150, "crash</w>": 5033, "crashed</w>": 16638, "crashes</w>": 17013, "crashing</w>": 24991, "crat</w>": 46696, "crate</w>": 24756, "crater</w>": 22663, "crates</w>": 30172, "cratic</w>": 32175, "crative</w>": 39999, "crats</w>": 43056, "crave</w>": 33397, "craven</w>": 33625, "craving</w>": 18344, "cravings</w>": 34476, "craw": 7400, "crawfish</w>": 42772, "crawford</w>": 15918, "crawl</w>": 20106, "crawler</w>": 41012, "crawley</w>": 42316, "crawling</w>": 37066, "cray": 24184, "cray</w>": 27032, "crayon</w>": 41801, "crayons</w>": 43508, "craz": 25776, "craze</w>": 30637, "craziest</w>": 32690, "craziness</w>": 46436, "crazy": 17540, "crazy</w>": 3578, "crc</w>": 25618, "cre": 798, "cre</w>": 17762, "cream": 23184, "cream</w>": 3867, "creams</w>": 41447, "creamy</w>": 17206, "crease</w>": 48441, "create": 30949, "create</w>": 3380, "created</w>": 4080, "creates</w>": 10361, "creati": 6714, "creating</w>": 5524, "creation": 38293, "creation</w>": 6900, "creations</w>": 17411, "creative": 15237, "creative</w>": 4450, "creatives</w>": 29352, "creativity</w>": 9636, "creator</w>": 10173, "creators</w>": 17981, "creature</w>": 14317, "creatures</w>": 13938, "cred": 7314, "cred</w>": 22377, "credenti": 29487, "credentials</w>": 33422, "credi": 21097, "credibility</w>": 34984, "credible</w>": 32983, "credit": 21467, "credit</w>": 3900, "credited</w>": 32480, "credits</w>": 10654, "creds</w>": 43462, "cree": 33961, "cree</w>": 36014, "creed</w>": 18845, "creek": 26120, "creek</w>": 5526, "creep": 8153, "creep</w>": 26084, "creeper</w>": 38662, "creeping</w>": 29697, "creeps</w>": 45135, "creepy</w>": 11943, "creighton</w>": 42823, "creme</w>": 22681, "creole</w>": 45632, "crepe</w>": 38611, "crescent</w>": 18211, "cress</w>": 39124, "crest": 35985, "crest</w>": 15760, "crested</w>": 36656, "crete</w>": 8584, "crew": 21560, "crew</w>": 3462, "crewe</w>": 43284, "crews</w>": 10463, "cri": 1621, "cri</w>": 38962, "crib</w>": 23271, "cric": 4328, "cricke": 19098, "cricket": 21859, "cricket</w>": 5373, "cricketer</w>": 28439, "cricketers</w>": 43986, "cried</w>": 15290, "cries</w>": 19769, "crime": 13872, "crime</w>": 4896, "crimea</w>": 28614, "crimes</w>": 11827, "crimin": 5874, "criminal": 30197, "criminal</w>": 8255, "criminals</w>": 18783, "crimson</w>": 19437, "cringe</w>": 42588, "cripp": 33588, "cris</w>": 37818, "crises</w>": 36403, "crisis</w>": 5712, "crisp</w>": 15145, "crispr</w>": 39784, "crisps</w>": 35744, "crispy</w>": 16458, "criss</w>": 29708, "cristi": 12699, "cristian</w>": 48808, "cristiano</w>": 14807, "cristina</w>": 33395, "cristo</w>": 38315, "crit": 3613, "crit</w>": 48130, "criteri": 33627, "criteria</w>": 24849, "criterion</w>": 43841, "criti": 25333, "critic": 12417, "critic</w>": 19361, "critical": 15314, "critical</w>": 6808, "critically</w>": 21570, "criticalrole": 33606, "criticalrole</w>": 22742, "criticalrolefanart</w>": 43663, "critici": 20333, "criticism</w>": 17405, "criticize</w>": 46081, "criticized</w>": 41557, "critics</w>": 16946, "critique</w>": 32982, "critters</w>": 35423, "crm</w>": 22610, "cro": 1192, "cro</w>": 22522, "croati": 28072, "croatia</w>": 13323, "croatian</w>": 34795, "croc</w>": 43350, "croche": 35352, "crochet</w>": 17554, "crock": 41685, "crocker</w>": 47843, "crockett</w>": 48313, "crocod": 24519, "crocodile</w>": 24757, "crocs</w>": 38988, "croft</w>": 16657, "croissant</w>": 46011, "croix</w>": 44735, "crom": 25082, 
"crombie</w>": 46162, "cromwell</w>": 45345, "cron": 17361, "croo": 16443, "crook</w>": 43744, "crooked": 48473, "crooked</w>": 25644, "crooks</w>": 44226, "crop": 40751, "crop</w>": 9955, "cropped</w>": 31139, "crops</w>": 16290, "crore</w>": 18274, "crores</w>": 37281, "cros": 16670, "crosby</w>": 21095, "cross": 5266, "cross</w>": 3417, "crossed</w>": 11731, "crosses</w>": 20473, "crossfit": 47214, "crossfit</w>": 20395, "crossing</w>": 8673, "crossings</w>": 43517, "crossover</w>": 17194, "crossroads</w>": 27427, "crossword</w>": 32945, "crou": 31206, "crouch</w>": 36506, "crow": 3138, "crow</w>": 16019, "crowd": 12036, "crowd</w>": 4570, "crowded</w>": 20182, "crowdfunding</w>": 17971, "crowds</w>": 16092, "crowe</w>": 33560, "crowley</w>": 32287, "crown": 22190, "crown</w>": 6902, "crowned</w>": 16109, "crowns</w>": 33229, "crows</w>": 27134, "croy": 21676, "croydon</w>": 27116, "crs</w>": 28449, "crt</w>": 43877, "cru": 1815, "cru</w>": 29788, "cruci": 18499, "crucial</w>": 12396, "crude</w>": 20677, "cruel": 16073, "cruel</w>": 17573, "cruelty</w>": 20675, "cruis": 27721, "cruise": 36425, "cruise</w>": 6764, "cruiser</w>": 21394, "cruises</w>": 19214, "cruising</w>": 19743, "crum": 43268, "crumb": 48327, "crumb</w>": 39909, "crumble</w>": 36595, "crumbs</w>": 35893, "crun": 17407, "crunch</w>": 16620, "crunchy</w>": 31366, "crusad": 19133, "crusade</w>": 36846, "crusader</w>": 40171, "crusaders</w>": 31319, "crush": 22296, "crush</w>": 7610, "crushed</w>": 18270, "crusher</w>": 44923, "crushes</w>": 35844, "crushing</w>": 20790, "crust</w>": 23136, "crusted</w>": 37314, "cruz": 33689, "cruz</w>": 8403, "cry": 2837, "cry</w>": 6290, "crying</w>": 6828, "cryo": 32215, "cryp": 4865, "crypt</w>": 37814, "cryptic</w>": 46925, "crypto": 8080, "crypto</w>": 9608, "cryptocurrencies</w>": 33329, "cryptocurrency</w>": 12070, "cryst": 15891, "crystal": 17387, "crystal</w>": 6517, "crystalli": 47551, "crystals</w>": 18350, "cs": 11978, "cs</w>": 2804, "csa</w>": 26355, "csc": 41727, "csc</w>": 37266, "csd</w>": 36913, "cse</w>": 41659, "csg</w>": 47085, "csgo</w>": 28928, "csi": 41750, "csi</w>": 28070, "csk</w>": 43036, "csm</w>": 40061, "csn</w>": 46329, "cso</w>": 43864, "csp</w>": 39243, "csr": 32105, "csr</w>": 24598, "csrracing</w>": 44193, "css": 41418, "css</w>": 19846, "cst</w>": 17016, "csu": 35948, "csu</w>": 31261, "csw</w>": 41031, "ct": 3381, "ct</w>": 1122, "cta</w>": 28397, "ctar</w>": 27842, "ctc</w>": 34123, "cte</w>": 31410, "cted</w>": 2910, "ctf</w>": 35250, "cthulhu</w>": 41064, "cting</w>": 7985, "ction": 17578, "ction</w>": 1569, "ctions</w>": 7021, "ctive</w>": 9313, "cto</w>": 17445, "ctor</w>": 8108, "ctr": 35602, "ctr</w>": 18481, "cts</w>": 6936, "ctto</w>": 25118, "ctu": 20834, "cture</w>": 17668, "ctv": 21213, "ctv</w>": 27590, "cu": 729, "cu</w>": 11224, "cuando</w>": 40388, "cub": 16938, "cub</w>": 19972, "cuba</w>": 11576, "cuban</w>": 15536, "cube": 47753, "cube</w>": 11353, "cubes</w>": 31413, "cubic": 48159, "cubic</w>": 29614, "cubs</w>": 9858, "cuck": 26364, "cuckoo</w>": 38062, "cucu": 16705, "cucumber</w>": 19787, "cucumbers</w>": 48065, "cud": 42684, "cudd": 12820, "cuddle</w>": 19568, "cuddles</w>": 24001, "cuddling</w>": 29696, "cuddly</w>": 36208, "cudi</w>": 48713, "cue</w>": 13424, "cuer": 39506, "cues</w>": 35719, "cuff": 34693, "cuff</w>": 22414, "cufflinks</w>": 43938, "cuffs</w>": 37221, "cuis": 9938, "cuisine</w>": 10605, "cuk</w>": 34838, "cul": 1877, "cula</w>": 35935, "cular</w>": 10940, "culars</w>": 45719, "cule</w>": 31066, "cules</w>": 
18984, "culin": 14772, "culinary</w>": 16466, "cull": 21880, "cull</w>": 42061, "cullen</w>": 25973, "culmin": 33778, "culo": 36305, "culprit</w>": 41593, "cult</w>": 11965, "cultiv": 16781, "cultivate</w>": 42983, "cultivated</w>": 48901, "cultivation</w>": 41539, "cultur": 20780, "cultural": 34908, "cultural</w>": 6753, "culturally</w>": 36783, "culture": 20197, "culture</w>": 3673, "cultured</w>": 40176, "cultures</w>": 19552, "culver</w>": 42103, "cum": 20142, "cum</w>": 27119, "cumb": 10858, "cumber": 15309, "cumberbatch</w>": 27541, "cumberland</w>": 28747, "cumbri": 32010, "cumbria</w>": 17953, "cumin</w>": 42285, "cumple</w>": 47050, "cumul": 42961, "cumulative</w>": 47610, "cumulus</w>": 46313, "cun": 12423, "cun</w>": 29532, "cunningham</w>": 25321, "cuomo</w>": 25681, "cup": 5059, "cup</w>": 1937, "cupboard</w>": 32074, "cupcake</w>": 17025, "cupcakes</w>": 12747, "cupid</w>": 34885, "cuppa</w>": 28077, "cups</w>": 11463, "cur": 1092, "cur</w>": 33073, "curated</w>": 20341, "curator</w>": 20753, "curb</w>": 21931, "curd</w>": 38881, "cure": 36758, "cure</w>": 9088, "cured</w>": 26248, "cures</w>": 38204, "curfew</w>": 48826, "curi": 12640, "curing</w>": 44169, "curiosity</w>": 21583, "curious</w>": 9865, "curl</w>": 24306, "curled</w>": 43734, "curling</w>": 18543, "curls</w>": 24340, "curly</w>": 20795, "curran</w>": 40999, "currant</w>": 43501, "curren": 6142, "currencies</w>": 23530, "currency</w>": 7853, "current</w>": 3653, "currently</w>": 3792, "currents</w>": 35450, "curric": 16201, "curriculum</w>": 17947, "currie</w>": 39385, "curry": 49285, "curry</w>": 8051, "curse</w>": 18479, "cursed</w>": 26408, "cursor</w>": 46546, "curt</w>": 38137, "curtain</w>": 17223, "curtains</w>": 30223, "curti": 39925, "curtis</w>": 13808, "curve</w>": 15792, "curved</w>": 25789, "curves</w>": 22814, "curvy</w>": 45788, "cus</w>": 2736, "cusa</w>": 47414, "cuse</w>": 37950, "cush": 43731, "cushi": 15333, "cushion</w>": 20853, "cushions</w>": 34163, "cussion</w>": 16658, "cussions</w>": 46853, "cust": 20900, "custard</w>": 26516, "custo": 4376, "custody</w>": 16176, "custom": 2662, "custom</w>": 4996, "custome": 41323, "customer": 24035, "customer</w>": 5102, "customerexperience</w>": 45167, "customers</w>": 5528, "customerservice</w>": 40611, "customiz": 41793, "customizable</w>": 48253, "customization</w>": 48244, "customize</w>": 32179, "customized</w>": 23229, "customs</w>": 16880, "cut": 10511, "cut</w>": 3032, "cute": 16031, "cute</w>": 2242, "cuteness</w>": 19342, "cuter</w>": 27151, "cutest</w>": 8032, "cuth": 44328, "cutie</w>": 10733, "cuties": 40939, "cuties</w>": 23420, "cutiesaturday</w>": 41883, "cutler</w>": 40428, "cutlery</w>": 49073, "cutout</w>": 45016, "cuts</w>": 7435, "cutt": 27338, "cutt</w>": 47647, "cutter</w>": 19719, "cutters</w>": 44783, "cutting</w>": 7266, "cuz</w>": 9215, "cv": 13531, "cv</w>": 13947, "cvs</w>": 29603, "cw": 10652, "cw</w>": 11065, "cwc</w>": 19179, "cwgc</w>": 48527, "cws</w>": 45186, "cx": 44457, "cx</w>": 14283, "cy": 1470, "cy</w>": 1678, "cyber": 5830, "cyber</w>": 10210, "cybercrime</w>": 41772, "cybermonday</w>": 36578, "cyberpunk</w>": 36896, "cybersecurity</w>": 10581, "cyborg</w>": 36650, "cycl": 9791, "cycle": 19083, "cycle</w>": 5072, "cycled</w>": 31055, "cycles</w>": 14605, "cycli": 12201, "cycling": 26353, "cycling</w>": 6321, "cyclist</w>": 20686, "cyclists</w>": 20303, "cyclo": 18122, "cyclone": 48094, "cyclone</w>": 20917, "cyclones</w>": 34669, "cylin": 18569, "cylinder</w>": 22092, "cylinders</w>": 48888, "cymb": 36677, 
"cymru</w>": 24005, "cyn": 14324, "cynthi": 41994, "cynthia</w>": 23748, "cyp": 14809, "cypress</w>": 25347, "cypri": 36481, "cyprus</w>": 15263, "cyril</w>": 36028, "cyrus</w>": 14204, "cystic</w>": 46131, "cyto": 31864, "cz": 22898, "cz</w>": 22921, "cze": 12152, "czech": 43151, "czech</w>": 16141, "cé": 36454, "cé</w>": 18317, "d": 67, "d</w>": 323, "da": 925, "da</w>": 1140, "daa</w>": 32642, "daan</w>": 44814, "dab": 10413, "dab</w>": 22900, "dac": 16222, "dac</w>": 27478, "daca</w>": 28477, "dach": 34166, "dachsh": 41641, "dachshund</w>": 42720, "dad": 4346, "dad</w>": 2639, "dada</w>": 31325, "daddy": 29466, "daddy</w>": 6546, "dade</w>": 23299, "dades</w>": 28289, "dads</w>": 12741, "dae": 23358, "dae</w>": 15422, "daener": 46934, "daes": 47282, "daesh</w>": 35047, "daf": 9972, "daf</w>": 36704, "daffodils</w>": 44769, "daft</w>": 36347, "dag": 11434, "dag</w>": 25650, "dagger</w>": 34251, "dah": 16976, "dah</w>": 11776, "dahl": 45816, "dahl</w>": 22621, "dahlia</w>": 41768, "dai": 13559, "dai</w>": 10632, "dail": 14676, "dailies</w>": 21260, "daily": 6689, "daily</w>": 2873, "dailynews</w>": 43466, "dailys": 43160, "dailysketch</w>": 46738, "daim": 40421, "dain": 32222, "dain</w>": 28315, "daipur</w>": 47631, "dair": 19998, "dair</w>": 42078, "dairy": 25243, "dairy</w>": 10302, "dairyfree</w>": 49366, "dais": 10502, "daisi": 39947, "daisies</w>": 40654, "daisy": 39310, "daisy</w>": 12865, "dak": 6999, "dak</w>": 16095, "dakar</w>": 31137, "dakota": 38522, "dakota</w>": 12358, "dal": 2476, "dal</w>": 5601, "dala</w>": 42675, "dalai</w>": 41222, "dalail": 35169, "dalailama</w>": 35849, "dale": 11533, "dale</w>": 4677, "dalejr</w>": 38207, "dales</w>": 29031, "daley</w>": 28544, "dalgo</w>": 43614, "dali": 36735, "dali</w>": 25703, "dalit</w>": 45432, "dall</w>": 43631, "dalla": 16772, "dallas": 27414, "dallas</w>": 5759, "dallascowboys</w>": 33016, "dalmati": 44275, "dalton</w>": 21488, "daly</w>": 24873, "dam": 1880, "dam</w>": 4926, "damage</w>": 6822, "damaged</w>": 13568, "damages</w>": 28842, "damaging</w>": 20610, "damas": 23345, "damascus</w>": 25396, "dame</w>": 10069, "dames</w>": 44548, "dami": 17783, "damian": 43307, "damian</w>": 25375, "damien</w>": 25090, "dammit</w>": 31057, "damn": 37409, "damn</w>": 4451, "damned</w>": 28428, "damon": 48503, "damon</w>": 18244, "damp</w>": 26520, "dams</w>": 37680, "dan": 2257, "dan</w>": 2284, "dana": 44834, "dana</w>": 13777, "danao</w>": 38598, "danc": 3945, "dance": 10619, "dance</w>": 2724, "danced</w>": 32891, "dancehall</w>": 33300, "dancer</w>": 11400, "dancers</w>": 13153, "dances</w>": 24083, "dancing": 33280, "dancing</w>": 6226, "dand": 12593, "dandelion</w>": 38903, "dandy</w>": 31932, "dane</w>": 19330, "danes</w>": 47477, "dang": 4283, "dang</w>": 14992, "danger": 20083, "danger</w>": 11212, "dangerous</w>": 7350, "dangerously</w>": 35012, "dangers</w>": 23726, "dangle</w>": 39907, "dani": 3001, "dani</w>": 17009, "daniel": 7859, "daniel</w>": 4981, "daniela</w>": 44466, "danielle": 30396, "danielle</w>": 15292, "danielpadilla</w>": 34702, "daniels</w>": 16146, "danish</w>": 15467, "dank</w>": 31849, "dann": 11951, "danny": 14950, "danny</w>": 7621, "dano</w>": 29703, "dans</w>": 16241, "dant": 48097, "dant</w>": 28237, "dante</w>": 21911, "danube</w>": 44594, "dany</w>": 47816, "dao</w>": 36099, "dap": 12149, "dap</w>": 38034, "daph": 24591, "daphne</w>": 31687, "dapl</w>": 34478, "dapp": 46857, "dapper</w>": 26071, "daq</w>": 25381, "dar": 1377, "dar</w>": 6242, "dara</w>": 17064, "darby</w>": 34366, "darcy</w>": 
32916, "dare": 14833, "dare</w>": 9863, "daredevil</w>": 28849, "dares</w>": 42973, "dareto": 46794, "dari": 16292, "dari</w>": 14552, "daria</w>": 45622, "daries</w>": 18184, "daring</w>": 28166, "dario</w>": 33918, "darius</w>": 32606, "darje": 49089, "dark": 5724, "dark</w>": 3144, "darker</w>": 18737, "darkest</w>": 25898, "darkness</w>": 10521, "darling</w>": 13048, "darlings</w>": 39961, "darlington</w>": 34565, "darn</w>": 26059, "darrell</w>": 33522, "darren": 20263, "darren</w>": 12275, "darry": 29200, "darryl</w>": 35359, "darshan</w>": 34564, "dart": 14001, "dart</w>": 19841, "darth": 41304, "darth</w>": 23164, "dartmoor</w>": 31477, "dartmouth</w>": 29667, "darts</w>": 15246, "darwin": 43013, "darwin</w>": 20926, "daryl": 45607, "daryl</w>": 24532, "das": 9940, "das</w>": 7359, "dash": 13858, "dash</w>": 10206, "dashboard</w>": 27679, "dashi": 12876, "dashing</w>": 33825, "dat": 1717, "dat</w>": 9445, "data": 14876, "data</w>": 2281, "datab": 11941, "database</w>": 14678, "databases</w>": 48384, "datac": 27329, "datacenter</w>": 40133, "datasci": 14496, "datascience</w>": 15748, "dataviz</w>": 28138, "date": 34300, "date</w>": 1524, "dated</w>": 13564, "dates</w>": 7228, "dating</w>": 8534, "dation</w>": 15311, "datlantic": 34270, "dato</w>": 36075, "dats</w>": 48674, "dau": 3162, "dau</w>": 33828, "daugh": 42523, "daughter</w>": 3944, "daughters</w>": 13585, "daun": 29470, "dav": 3700, "dav</w>": 46488, "davao</w>": 31502, "dave": 10089, "dave</w>": 5077, "daven": 28350, "davenport</w>": 34624, "davey</w>": 33391, "davi": 1732, "david": 4640, "david</w>": 2259, "davidbowie</w>": 44448, "davido</w>": 35989, "davids</w>": 46695, "davidson</w>": 13166, "davies</w>": 13120, "davin": 43187, "davis": 24426, "davis</w>": 5536, "davison</w>": 43725, "davos</w>": 31887, "davy</w>": 41565, "daw": 5971, "daw</w>": 24404, "dawg</w>": 18660, "dawgs</w>": 26431, "dawn": 30590, "dawn</w>": 7689, "dawson</w>": 18611, "dax</w>": 29458, "day": 1405, "day</w>": 575, "daya</w>": 38165, "daybreak</w>": 33862, "daycare</w>": 36363, "daydream</w>": 41587, "dayin": 20332, "daylight</w>": 20809, "dayo": 29856, "dayo</w>": 46605, "dayof": 16272, "dayofthe": 38043, "days</w>": 1161, "daysof": 12379, "daysofcode</w>": 36537, "daysto": 29886, "daystogo</w>": 42198, "dayswild</w>": 42052, "daytime</w>": 22830, "dayton": 35729, "dayton</w>": 20262, "daytona</w>": 16335, "dayweekend</w>": 44526, "dayz</w>": 35949, "daz": 15449, "daz</w>": 43844, "daze</w>": 33591, "dazz": 17149, "dazzle</w>": 41164, "dazzling</w>": 28821, "db": 19100, "db</w>": 8128, "dbacks</w>": 31175, "dbs</w>": 40558, "dbz</w>": 49226, "dc": 5074, "dc</w>": 2743, "dca</w>": 49107, "dcc</w>": 33747, "dccomics</w>": 17610, "dcfc</w>": 35526, "dci</w>": 35336, "dcs</w>": 42878, "dcu</w>": 42647, "dd": 1353, "dd</w>": 3766, "dda</w>": 35202, "ddad</w>": 39049, "dday": 32689, "dday</w>": 26243, "ddc</w>": 48513, "ddd</w>": 24183, "dddd</w>": 35362, "dden</w>": 5013, "dder</w>": 9300, "dders</w>": 24827, "ddi</w>": 44450, "ddin</w>": 17175, "dding": 48101, "dding</w>": 8974, "ddings</w>": 49106, "ddington</w>": 29238, "ddle": 17633, "ddle</w>": 8357, "ddled</w>": 38392, "ddles</w>": 33901, "ddleston</w>": 25647, "ddling</w>": 30981, "ddlovato</w>": 28244, "ddos</w>": 46463, "ddr</w>": 26027, "dds</w>": 48334, "ddu</w>": 43836, "ddy": 14981, "ddy</w>": 7876, "de": 561, "de</w>": 654, "dea</w>": 18477, "deacon</w>": 29155, "dead": 3906, "dead</w>": 2747, "deadliest</w>": 40811, "deadline": 47209, "deadline</w>": 8458, "deadlines</w>": 44959, 
"deadly</w>": 10756, "deadpool</w>": 21471, "deaf": 28229, "deaf</w>": 18358, "deal": 7249, "deal</w>": 2696, "dealer</w>": 15218, "dealers</w>": 21697, "dealership</w>": 32096, "dealing</w>": 13138, "deals</w>": 4469, "dealt</w>": 30101, "dean": 13807, "dean</w>": 5828, "deandre</w>": 43635, "deans</w>": 46852, "dear": 15696, "dear</w>": 3817, "dearest</w>": 24880, "dearly</w>": 31880, "deas</w>": 34715, "death": 7163, "death</w>": 2767, "deaths</w>": 12253, "deau</w>": 12399, "deaux</w>": 19883, "deb": 2987, "deb</w>": 25687, "debat": 32082, "debate</w>": 5196, "debates</w>": 19239, "debating</w>": 23472, "debbie": 47186, "debbie</w>": 16735, "debit</w>": 32410, "debor": 16738, "deborah": 40997, "deborah</w>": 22150, "debra</w>": 33233, "debris</w>": 19208, "debt</w>": 8932, "debts</w>": 38770, "debu": 9790, "debun": 33123, "debut": 42608, "debut</w>": 4085, "debuted</w>": 25215, "debuting</w>": 34817, "debuts</w>": 17044, "dec": 3063, "dec</w>": 4628, "deca</w>": 33428, "decad": 29914, "decade</w>": 11099, "decadent</w>": 41716, "decades</w>": 10488, "decal</w>": 26678, "decals</w>": 37606, "decan": 40677, "decat": 35334, "decath": 47455, "decatur</w>": 38540, "decay</w>": 22703, "dece": 3534, "deceased</w>": 30035, "december</w>": 3864, "decent</w>": 10698, "decentr": 28960, "decentralized</w>": 38485, "decep": 33529, "deception</w>": 33046, "deci": 2262, "decide</w>": 8447, "decided</w>": 4939, "decides</w>": 17269, "deciding</w>": 22513, "decision</w>": 5575, "decisions</w>": 9903, "decisive</w>": 28690, "deck": 24885, "deck</w>": 6943, "decked</w>": 39096, "decker</w>": 21449, "decks</w>": 23968, "decl": 7091, "decla": 10739, "declan</w>": 42341, "declar": 18040, "declaration</w>": 19714, "declare</w>": 19856, "declared</w>": 13845, "declares</w>": 23641, "declaring</w>": 33273, "decline</w>": 15084, "declined</w>": 28911, "declines</w>": 40478, "declining</w>": 29221, "deco": 26412, "deco</w>": 16422, "decor": 5148, "decor</w>": 6928, "decorate</w>": 23651, "decorated</w>": 15917, "decorating</w>": 16968, "decoration</w>": 16029, "decorations</w>": 19158, "decorative</w>": 19289, "decre": 12284, "decrease</w>": 24703, "decreased</w>": 33913, "decreasing</w>": 43763, "decree</w>": 43327, "ded": 16744, "ded</w>": 1241, "dedic": 4701, "dedicate</w>": 27610, "dedicated</w>": 6770, "dedication</w>": 10188, "dedly</w>": 36204, "deduc": 22799, "dee": 5268, "dee</w>": 6705, "deed</w>": 30260, "deeds</w>": 24516, "deejay": 48304, "deejay</w>": 44511, "deemed</w>": 28102, "deen": 26456, "deen</w>": 12912, "deep": 5462, "deep</w>": 3383, "deepak</w>": 45528, "deeper</w>": 15224, "deepest</w>": 22245, "deephouse</w>": 35684, "deepi": 19371, "deepika</w>": 34120, "deepikap": 29903, "deepikapadukone</w>": 30646, "deeplear": 22181, "deeplearning</w>": 24362, "deeply</w>": 11449, "deer": 19454, "deer</w>": 8700, "deere</w>": 32901, "dees</w>": 12547, "deets</w>": 35537, "def": 2044, "def</w>": 11649, "defam": 35670, "defamation</w>": 42741, "default</w>": 21650, "defe": 4148, "defeat</w>": 8477, "defeated</w>": 8927, "defeating</w>": 22594, "defeats</w>": 16317, "defect</w>": 44013, "defects</w>": 37485, "defen": 3619, "defence": 30307, "defence</w>": 9659, "defend": 21970, "defend</w>": 11397, "defended</w>": 27161, "defender</w>": 10618, "defenders</w>": 20063, "defending</w>": 13098, "defends</w>": 20134, "defense": 45875, "defense</w>": 6021, "defenseman</w>": 43714, "defenses</w>": 49198, "defensive</w>": 10824, "defi": 17244, "defiance</w>": 36186, "defiant</w>": 47597, "defibrill": 47684, 
"defic": 18022, "defici": 23387, "deficiency</w>": 30685, "deficit</w>": 20156, "defin": 3188, "define</w>": 14919, "defined</w>": 15278, "defines</w>": 28218, "defining</w>": 20504, "definite</w>": 40793, "definitely</w>": 4824, "definition</w>": 11405, "definitive</w>": 25298, "defl": 31467, "deforestation</w>": 41330, "defstar</w>": 36427, "defy</w>": 39148, "defying</w>": 38496, "deg</w>": 38498, "degra": 28939, "degradation</w>": 44468, "degre": 4653, "degree</w>": 7119, "degrees</w>": 8000, "deh</w>": 35582, "dei": 33833, "dei</w>": 23279, "deir": 42948, "deity</w>": 42574, "deja</w>": 46902, "dek</w>": 23901, "dekalb</w>": 37775, "del": 1233, "del</w>": 2003, "dela</w>": 37986, "delaney</w>": 31528, "delav": 23706, "delavin": 40477, "delavin</w>": 40776, "delavinkisses</w>": 40631, "delaware</w>": 17547, "delay": 12955, "delay</w>": 10934, "delayed</w>": 14567, "delaying</w>": 43781, "delays</w>": 11232, "dele": 7922, "dele</w>": 33431, "delec": 38615, "delectable</w>": 45500, "deleg": 8046, "delegate</w>": 27259, "delegates</w>": 14623, "delegation</w>": 14632, "delete</w>": 19204, "deleted</w>": 16588, "deleting</w>": 41857, "delft</w>": 42749, "delgado</w>": 49182, "delhi": 26723, "delhi</w>": 5717, "deli": 1932, "deli</w>": 18601, "delia</w>": 33193, "deliber": 18316, "deliberate</w>": 38271, "deliberately</w>": 35163, "delic": 13366, "delicacy</w>": 49181, "delicate</w>": 18768, "delici": 19993, "delicious</w>": 3959, "deliciously</w>": 39589, "deliciousness</w>": 42819, "delight": 46165, "delight</w>": 13073, "delighted</w>": 5943, "delightful</w>": 15513, "delights</w>": 25330, "deline</w>": 18797, "delines</w>": 13562, "delish</w>": 25093, "deliver": 19561, "deliver</w>": 7396, "delivered</w>": 7278, "deliveries</w>": 29336, "delivering</w>": 9943, "delivers</w>": 11753, "delivery</w>": 5619, "dell": 24381, "dell</w>": 10242, "della</w>": 22986, "delle</w>": 35963, "deloit": 29428, "deloitte</w>": 38667, "dels</w>": 48636, "delta": 32250, "delta</w>": 8768, "delu": 18779, "delusional</w>": 48059, "delux": 13709, "deluxe</w>": 14056, "delve</w>": 46008, "dely</w>": 15040, "dem": 3251, "dem</w>": 7825, "dema": 40268, "dema</w>": 45046, "deman</w>": 48366, "demand": 13072, "demand</w>": 5650, "demanded</w>": 33699, "demanding</w>": 17099, "demands</w>": 14241, "demar</w>": 46566, "demarcus</w>": 47873, "demb": 35930, "demdebate</w>": 43973, "deme": 25143, "demean": 37376, "demen": 12604, "dementi": 46028, "dementia</w>": 14047, "demetri": 39553, "demi": 32879, "demi</w>": 14480, "demise</w>": 28756, "demo": 2930, "demo</w>": 7380, "democr": 3573, "democracy</w>": 7758, "democrat</w>": 15431, "democratic</w>": 9149, "democrats</w>": 8865, "demographic</w>": 31308, "demol": 19382, "demolished</w>": 26537, "demolition</w>": 22237, "demon": 5635, "demon</w>": 12085, "demonetisation</w>": 41338, "demonic</w>": 46920, "demons</w>": 18388, "demonstr": 8579, "demonstrate</w>": 22231, "demonstrated</w>": 29477, "demonstrates</w>": 24806, "demonstrating</w>": 22107, "demonstration</w>": 16722, "demonstrations</w>": 33964, "demonstrators</w>": 46450, "demos</w>": 19304, "demp": 22490, "dempsey</w>": 30188, "dems</w>": 10989, "demsin": 42664, "demsinphilly</w>": 43091, "den": 1177, "den</w>": 1181, "dena</w>": 32431, "denali</w>": 48076, "dence</w>": 3370, "dency</w>": 11659, "dend": 37447, "dends</w>": 43985, "dene</w>": 45128, "dened</w>": 19571, "deng": 43098, "deng</w>": 41788, "dengue</w>": 41932, "denham</w>": 39180, "deni": 21995, "denial</w>": 25716, "denied</w>": 15780, 
"denies</w>": 19565, "denim</w>": 13606, "denis": 47630, "denis</w>": 18750, "denise": 45900, "denise</w>": 20899, "denmark</w>": 13268, "dennis": 32738, "dennis</w>": 10534, "denny</w>": 26808, "denomin": 41016, "dens</w>": 16533, "dense</w>": 19353, "density</w>": 22431, "dent": 3593, "dent</w>": 1258, "dental": 24635, "dental</w>": 8382, "dentally</w>": 10346, "dented</w>": 21923, "denti": 4418, "dential</w>": 5459, "dentist</w>": 17816, "dentistry</w>": 25754, "dently</w>": 28817, "denton</w>": 23567, "dents</w>": 1517, "denver": 27847, "denver</w>": 8569, "deny</w>": 18679, "denying</w>": 32771, "denzel</w>": 42503, "deo": 26406, "deo</w>": 12121, "deodor": 47639, "deol</w>": 41902, "deon": 31466, "deon</w>": 16079, "dep": 6079, "dep</w>": 24370, "depar": 10794, "depart": 5343, "depart</w>": 30649, "departed</w>": 32541, "departing</w>": 26902, "department</w>": 5744, "departments</w>": 29523, "departs</w>": 38998, "departure</w>": 17850, "depe": 36118, "depend": 13894, "depend</w>": 27371, "dependence</w>": 40243, "dependent</w>": 23280, "depending</w>": 23673, "depends</w>": 20497, "depic": 11307, "depicted</w>": 34637, "depicting</w>": 24970, "depiction</w>": 31071, "depicts</w>": 29340, "deple": 38504, "deplo": 9356, "deplor": 39232, "deploy</w>": 26944, "deployed</w>": 20009, "deploying</w>": 42212, "deployment</w>": 20183, "depo": 14276, "depor": 36110, "deport": 23389, "deportation</w>": 36617, "deported</w>": 39320, "deportes</w>": 47878, "depos": 21266, "deposit</w>": 16775, "deposits</w>": 30740, "depot</w>": 12589, "depp</w>": 24941, "depre": 7107, "depress": 38869, "depressed</w>": 23269, "depressing</w>": 29235, "depression</w>": 10023, "depri": 28587, "depriv": 45809, "deprivation</w>": 47810, "deprived</w>": 39140, "dept</w>": 9201, "depth</w>": 10350, "depths</w>": 28855, "depu": 6912, "deputies</w>": 24914, "deputy</w>": 7932, "der": 839, "der</w>": 801, "dera</w>": 20696, "derail": 48502, "derby": 13904, "derby</w>": 7177, "derbyshire</w>": 22147, "derdale</w>": 21513, "dere": 5701, "dere</w>": 44194, "dered</w>": 3776, "derek": 22461, "derek</w>": 11205, "derel": 46728, "derer</w>": 11289, "derers</w>": 20882, "deri": 34573, "derick</w>": 33908, "dering</w>": 6076, "deriv": 33458, "derived</w>": 26461, "derland</w>": 35488, "derman</w>": 29740, "dermatology</w>": 48051, "dern</w>": 30086, "dero": 37203, "dero</w>": 34026, "derrick</w>": 21798, "derry": 45777, "derry</w>": 20535, "ders": 37307, "ders</w>": 1923, "derson</w>": 12677, "dery</w>": 17172, "des": 6797, "des</w>": 1437, "desai</w>": 35316, "desc": 13866, "descen": 32318, "descend": 26004, "descend</w>": 46241, "descendants</w>": 36323, "descending</w>": 36620, "descent</w>": 19375, "desch": 49209, "descri": 4637, "describe</w>": 10967, "described</w>": 14671, "describes</w>": 13678, "describing</w>": 24239, "descrip": 41832, "description</w>": 13951, "descriptions</w>": 40653, "desde</w>": 42218, "dese": 27195, "deser": 3659, "desert": 45776, "desert</w>": 7301, "deserted</w>": 41560, "deserve</w>": 7043, "deserved</w>": 10061, "deserves</w>": 9079, "deserving</w>": 26615, "desh": 25320, "desh</w>": 7448, "deshi</w>": 42769, "desi": 6772, "desi</w>": 26635, "desig": 1250, "design": 8359, "design</w>": 1681, "designated</w>": 24119, "designation</w>": 41155, "designed</w>": 4486, "designer": 35640, "designer</w>": 5728, "designers</w>": 12720, "designing</w>": 13467, "designs</w>": 6747, "designthinking</w>": 32450, "desirable</w>": 32368, "desire</w>": 11858, "desired</w>": 28631, "desires</w>": 27598, 
"desk": 11937, "desk</w>": 6550, "desks</w>": 41014, "desktop</w>": 14345, "desmond</w>": 27821, "desol": 41258, "desp": 3642, "despair</w>": 28097, "desper": 10144, "desperate</w>": 15072, "desperately</w>": 21993, "despic": 32442, "despicable</w>": 37158, "despite</w>": 5325, "dess": 7096, "dess</w>": 10001, "dessert</w>": 9753, "desserts</w>": 22948, "desses</w>": 43913, "dest": 6540, "dest</w>": 4549, "destin": 4934, "destination": 32191, "destination</w>": 9179, "destinations</w>": 16981, "destined</w>": 28525, "destiny": 39875, "destiny</w>": 10867, "destro": 8287, "destroy": 8308, "destroy</w>": 11930, "destroyed</w>": 9965, "destroyer</w>": 25291, "destroying</w>": 19613, "destroys</w>": 27634, "destruc": 22945, "destruction</w>": 14281, "destructive</w>": 29591, "det": 28966, "det</w>": 15366, "deta": 1914, "detached</w>": 26252, "detail</w>": 7657, "detailed</w>": 12609, "detailing</w>": 23163, "details</w>": 2353, "detained</w>": 20260, "dete": 5606, "detec": 17991, "detect</w>": 22744, "detected</w>": 26988, "detecting</w>": 41290, "detection</w>": 16220, "detective</w>": 13672, "detectives</w>": 27994, "detector</w>": 27689, "detectors</w>": 45063, "detention</w>": 16908, "deter": 10742, "deter</w>": 47458, "detergent</w>": 46726, "deterior": 28512, "determin": 8325, "determination</w>": 17410, "determine</w>": 16768, "determined</w>": 14371, "determines</w>": 42192, "determining</w>": 39884, "deth</w>": 38375, "deto": 39710, "deton": 39335, "detour</w>": 31211, "detox</w>": 22459, "detri": 47951, "detro": 6210, "detroit": 19404, "detroit</w>": 7073, "detta</w>": 45438, "dette</w>": 35750, "deu": 21457, "deuce</w>": 45332, "deus</w>": 37625, "deut": 14970, "deutsch": 30389, "deutsche</w>": 32760, "deutschland</w>": 36878, "deux</w>": 47089, "dev": 2797, "dev</w>": 3670, "deva</w>": 45179, "devan": 37072, "devast": 12913, "devastated</w>": 29865, "devastating</w>": 19280, "devastation</w>": 42452, "devel": 1820, "develop": 1966, "develop</w>": 7708, "developed</w>": 8763, "developer</w>": 10929, "developers</w>": 13248, "developing</w>": 8131, "development</w>": 2855, "developmental</w>": 29347, "developments</w>": 17393, "develops</w>": 29895, "deven": 45537, "devgn</w>": 29871, "devi": 12926, "devi</w>": 20717, "deviant": 25593, "deviantart</w>": 26046, "device</w>": 8163, "devices</w>": 9067, "devil": 8894, "devil</w>": 8043, "deville</w>": 34329, "devils</w>": 11683, "devin": 31193, "devin</w>": 20996, "devine</w>": 33019, "devlin</w>": 48040, "devo": 11861, "devo</w>": 43444, "devon": 16205, "devon</w>": 10046, "devops</w>": 21504, "devos</w>": 40646, "devote": 37777, "devoted</w>": 24561, "devotees</w>": 39759, "devotion</w>": 25821, "devotional</w>": 35456, "devs</w>": 27374, "dew": 31952, "dew</w>": 16358, "dewey</w>": 40399, "dex": 10030, "dex</w>": 13790, "dexpo</w>": 42502, "dexter": 45049, "dexter</w>": 22781, "dey</w>": 11829, "dez": 23190, "dez</w>": 8122, "df": 12908, "df</w>": 10468, "dfc</w>": 41903, "dfs</w>": 32880, "dfw</w>": 20439, "dg": 2394, "dg</w>": 9742, "dgate</w>": 41684, "dge": 4016, "dge</w>": 1360, "dged</w>": 11830, "dgeon</w>": 45655, "dgers</w>": 8733, "dges</w>": 5432, "dging</w>": 9565, "dh": 6669, "dh</w>": 9960, "dha": 11629, "dha</w>": 27377, "dhabi</w>": 22349, "dhaka</w>": 32877, "dham": 29635, "dham</w>": 30838, "dhan": 12542, "dhan</w>": 28569, "dhanush": 26162, "dhanush</w>": 36200, "dhanushkraja</w>": 29266, "dhar": 12397, "dharma</w>": 30536, "dhary</w>": 28706, "dhawan</w>": 44699, "dhe": 29706, "dheim</w>": 44280, "dhi": 31553, 
"dhi</w>": 26166, "dho": 37834, "dhoni</w>": 25698, "dhru": 40257, "dhry</w>": 39960, "dhs</w>": 26849, "dhu": 32387, "di": 570, "di</w>": 1618, "dia": 7351, "dia</w>": 3357, "diab": 15954, "diabe": 19167, "diabete": 43826, "diabetes</w>": 10319, "diabetic</w>": 30230, "diablo</w>": 23931, "diag": 6851, "diagno": 7736, "diagnose</w>": 44429, "diagnosed</w>": 16979, "diagnosis</w>": 15715, "diagnostic</w>": 26351, "diagnostics</w>": 37723, "diagram</w>": 22697, "dial": 18416, "dial</w>": 11381, "dialo": 30709, "dialog</w>": 48945, "dialogue</w>": 11288, "dialogues</w>": 40330, "dialysis</w>": 44798, "diam": 4347, "diameter</w>": 27189, "diamon": 8873, "diamond": 18535, "diamond</w>": 6235, "diamonds</w>": 12687, "dian": 16021, "dian</w>": 4998, "diana</w>": 12803, "diane</w>": 15855, "dianne</w>": 42299, "dians</w>": 21041, "diaper</w>": 34382, "diapers</w>": 39659, "diar": 25932, "diaries</w>": 15541, "diary</w>": 10380, "dias": 22137, "dias</w>": 29354, "diaspora</w>": 28390, "diaz</w>": 17688, "dic": 1404, "dic</w>": 6717, "dicap": 30023, "dicaprio</w>": 30755, "dice</w>": 14406, "dick": 14413, "dick</w>": 9554, "dickens</w>": 33421, "dict": 45360, "dict</w>": 15159, "dictat": 26156, "dictator</w>": 27399, "dictatorship</w>": 37989, "dictionary</w>": 19699, "did": 1861, "did</w>": 1335, "diddy</w>": 33527, "didi</w>": 34396, "didier</w>": 45614, "didn</w>": 2376, "didnt</w>": 13057, "dido</w>": 31725, "didyou": 12295, "didyouknow</w>": 12506, "die": 3150, "die</w>": 2082, "diec": 27729, "diecast</w>": 37936, "died</w>": 3622, "diego": 30940, "diego</w>": 6306, "diem</w>": 45571, "dience</w>": 33686, "dient</w>": 27231, "dier": 29702, "dier</w>": 16394, "dies": 20104, "dies</w>": 1862, "diesel": 46312, "diesel</w>": 10591, "diest</w>": 45739, "diet": 21295, "diet</w>": 6582, "dietary</w>": 29009, "dietrich</w>": 47005, "diets</w>": 35173, "dif": 18656, "dif</w>": 48731, "diff": 44073, "diff</w>": 20331, "diffe": 1967, "differ</w>": 34620, "differen": 14903, "difference</w>": 4731, "differences</w>": 14003, "different</w>": 2731, "differenti": 21729, "differential</w>": 34027, "differentiate</w>": 49032, "differently</w>": 18325, "diffic": 6140, "difficult</w>": 7405, "difficulties</w>": 23468, "difficulty</w>": 25245, "diffu": 31603, "diffuser</w>": 49400, "dig": 1831, "dig</w>": 9887, "dige": 17820, "digest</w>": 20413, "digestion</w>": 40533, "digestive</w>": 32304, "digg": 43240, "digger</w>": 35919, "diggin</w>": 48466, "digging</w>": 14971, "digi": 15627, "digi</w>": 39361, "digimon</w>": 44181, "digit": 14899, "digit</w>": 27472, "digital": 4704, "digital</w>": 2794, "digitalart</w>": 16987, "digitalhealth</w>": 32190, "digitalindia</w>": 46630, "digitally</w>": 27543, "digitalmarketing</w>": 15299, "digitaltransformation</w>": 20047, "digiti": 25935, "digits</w>": 31710, "digni": 45532, "dignit": 39497, "dignity</w>": 17744, "digo</w>": 35701, "digs</w>": 26877, "dih</w>": 43089, "dii</w>": 32755, "dijk</w>": 44444, "dik": 38854, "dik</w>": 37747, "dike</w>": 42683, "dil": 7643, "dil</w>": 17942, "dile": 25428, "dilemma</w>": 29787, "dilig": 30664, "dill": 12318, "dill</w>": 27206, "dillon</w>": 21056, "dilu": 45242, "dim": 19576, "dim</w>": 17523, "dime</w>": 24443, "dimen": 10935, "dimension</w>": 20479, "dimensional</w>": 25252, "dimensions</w>": 25086, "diment</w>": 43500, "dimes</w>": 44888, "dimini": 37459, "dimit": 22250, "dimitri</w>": 48840, "dimp": 38853, "din": 1462, "din</w>": 5673, "dina</w>": 36815, "dinah</w>": 30903, "dine": 20951, "dine</w>": 12989, "diner</w>": 
16963, "dinesh</w>": 48341, "ding": 7545, "ding</w>": 796, "dinger</w>": 45580, "dingh": 48064, "dings</w>": 5473, "dington</w>": 24804, "dinho</w>": 47370, "dini</w>": 20196, "dining</w>": 8658, "dinner": 27548, "dinner</w>": 2571, "dinners</w>": 33570, "dino": 9692, "dino</w>": 14077, "dinosa": 18955, "dinosaur</w>": 15095, "dinosaurs</w>": 20387, "dio": 3779, "dio</w>": 1521, "dioce": 20763, "diocese</w>": 27091, "dion": 42899, "dion</w>": 16250, "dior</w>": 23655, "dios</w>": 37563, "dious</w>": 27417, "dioxide</w>": 38102, "dip": 19918, "dip</w>": 11343, "dipl": 8490, "diplo</w>": 38115, "diplom": 11169, "diploma</w>": 21251, "diplomacy</w>": 23798, "diplomat</w>": 32828, "diplomatic</w>": 23782, "diplomats</w>": 44126, "dipped</w>": 30610, "dipper</w>": 49317, "dipping</w>": 33544, "dips</w>": 37522, "dir": 4251, "dir</w>": 8478, "dire": 38355, "dire</w>": 25664, "direc": 1534, "direct": 43224, "direct</w>": 6016, "directed</w>": 8392, "directing</w>": 21817, "direction": 15923, "direction</w>": 5407, "directional</w>": 38687, "directioner</w>": 48042, "directioners</w>": 22055, "directions</w>": 16440, "directive</w>": 40630, "directly</w>": 9701, "director": 20337, "director</w>": 2681, "directorial</w>": 45327, "directors</w>": 11940, "directory</w>": 25272, "directs</w>": 34349, "directv</w>": 48652, "dirk</w>": 28171, "dirt": 31415, "dirt</w>": 11795, "dirty": 20127, "dirty</w>": 7615, "dis": 1518, "dis</w>": 6112, "disa": 3882, "disab": 47380, "disabilities</w>": 17350, "disability": 48986, "disability</w>": 13261, "disabled</w>": 13613, "disadvantaged</w>": 40577, "disagree</w>": 23199, "disapp": 5384, "disappear": 21148, "disappear</w>": 25173, "disappearance</w>": 35929, "disappeared</w>": 23139, "disappearing</w>": 35819, "disappears</w>": 44406, "disappo": 7605, "disappoint</w>": 25446, "disappointed</w>": 13794, "disappointing</w>": 21941, "disappointment</w>": 23884, "disappoints</w>": 48545, "disappro": 48276, "disar": 42971, "disaster</w>": 9072, "disasters</w>": 26976, "disastrous</w>": 35790, "disc": 1472, "disc</w>": 10712, "discar": 40532, "discarded</w>": 45197, "discer": 49140, "dischar": 22671, "discharge</w>": 32485, "disci": 9559, "discip": 38951, "discipl": 10467, "disciples</w>": 39366, "disciplinary</w>": 20232, "discipline</w>": 18903, "disciplines</w>": 42032, "discla": 40248, "disclaimer</w>": 46465, "disclo": 17481, "disclose</w>": 46379, "disclosed</w>": 30905, "disclosure</w>": 26502, "disco": 2475, "disco</w>": 11964, "discography</w>": 47545, "discomfort</w>": 48054, "discord</w>": 23582, "discoun": 18515, "discount</w>": 7638, "discounted</w>": 20993, "discounts</w>": 18186, "discoura": 45850, "discourse</w>": 29441, "discover": 10539, "discover</w>": 4834, "discovered</w>": 6986, "discoveries</w>": 29308, "discovering</w>": 17967, "discovers</w>": 29719, "discovery": 40491, "discovery</w>": 8027, "discre": 20616, "discrimin": 11721, "discrimination</w>": 14775, "discs</w>": 29270, "discu": 1984, "discus</w>": 41828, "discuss</w>": 4312, "discussed</w>": 11300, "discusses</w>": 8116, "discussing</w>": 5900, "discussion</w>": 5060, "discussions</w>": 13806, "dise": 4262, "disease</w>": 5336, "diseases</w>": 12035, "disen": 46468, "disgrace</w>": 29877, "disgraceful</w>": 44146, "disgu": 9793, "disguise</w>": 27803, "disguised</w>": 37149, "disgusted</w>": 41977, "disgusting</w>": 16218, "dish": 11039, "dish</w>": 4531, "disha</w>": 42498, "dishes</w>": 11412, "dishon": 30777, "dishu</w>": 44728, "dishwasher</w>": 40524, "disin": 19484, "disinfe": 
48050, "disintegr": 49275, "disk</w>": 17970, "dislike</w>": 30796, "dism": 30836, "dism</w>": 38821, "dismant": 36557, "dismiss</w>": 43287, "dismissal</w>": 42068, "dismissed</w>": 30087, "dismisses</w>": 45238, "disney": 6729, "disney</w>": 4696, "disneyland": 39481, "disneyland</w>": 13661, "disneyworld</w>": 28469, "diso": 26305, "disobe": 42841, "dison</w>": 19310, "disorder</w>": 12635, "disorders</w>": 17114, "disp": 11073, "dispar": 24633, "disparities</w>": 45122, "dispat": 28652, "dispatch</w>": 26306, "dispen": 19077, "dispenser</w>": 40116, "disper": 34499, "displa": 9326, "displac": 17718, "displaced</w>": 22817, "displacement</w>": 37931, "display</w>": 4456, "displayed</w>": 18967, "displaying</w>": 26468, "displays</w>": 15648, "dispo": 13651, "dispon": 38872, "disponible</w>": 46130, "dispos": 45177, "disposable</w>": 37275, "disposal</w>": 28231, "dispro": 32927, "dispropor": 40354, "disproportion": 45492, "disregard</w>": 43869, "disrespect</w>": 34055, "disrespectful</w>": 41723, "disru": 13763, "disrup": 14641, "disrupt</w>": 25214, "disrupted</w>": 46674, "disrupting</w>": 42419, "disruption</w>": 19635, "disruptive</w>": 31554, "diss": 10766, "diss</w>": 35688, "dissec": 43879, "dissemin": 40463, "dissent</w>": 45154, "disser": 25560, "dissertation</w>": 29448, "dissi": 25088, "dissol": 27398, "dissuper</w>": 33461, "dist": 5479, "dist</w>": 12116, "distance</w>": 7964, "distances</w>": 37078, "distant</w>": 18949, "distill": 41586, "distilled</w>": 49179, "distillery</w>": 22200, "distin": 11892, "distinct</w>": 25056, "distinction</w>": 28183, "distinctive</w>": 25486, "distingui": 15053, "distinguish</w>": 45418, "distinguished</w>": 16513, "distor": 23781, "distortion</w>": 43690, "distr": 11885, "distract</w>": 39309, "distracted</w>": 24049, "distraction</w>": 32039, "distress</w>": 26866, "distressed</w>": 37515, "distri": 5987, "distribu": 6138, "distribute</w>": 32313, "distributed</w>": 16419, "distributing</w>": 35216, "distribution</w>": 10484, "distributor</w>": 28354, "distributors</w>": 44240, "distric": 3208, "district": 46683, "district</w>": 3506, "districts</w>": 17565, "distur": 11732, "disturb": 33018, "disturb</w>": 39449, "disturbance</w>": 42416, "disturbed</w>": 29967, "disturbing</w>": 21476, "disupdates</w>": 45667, "dit": 5752, "dit</w>": 2524, "dita</w>": 47965, "ditch": 43715, "ditch</w>": 19291, "dited</w>": 40392, "diti": 2363, "dition": 16452, "dition</w>": 3015, "ditional</w>": 4322, "ditions</w>": 4503, "dito</w>": 43705, "dits</w>": 49374, "dity</w>": 16436, "dium</w>": 2903, "div": 5293, "div</w>": 14869, "diva</w>": 13605, "divas</w>": 23534, "dive": 26042, "dive</w>": 9058, "diver": 13119, "diver</w>": 22094, "divergence</w>": 48735, "divergent</w>": 36132, "divers": 30241, "divers</w>": 27038, "diverse</w>": 11464, "diversi": 24475, "diversion</w>": 38457, "diversity": 35634, "diversity</w>": 6257, "diverted</w>": 41049, "dives</w>": 13893, "divi": 8375, "divid": 31337, "divide</w>": 18842, "divided</w>": 18689, "dividend</w>": 32067, "dividends</w>": 45146, "dividing</w>": 45605, "divin": 21838, "divine": 46919, "divine</w>": 10976, "diving</w>": 9886, "divinity</w>": 39754, "divisi": 39196, "division</w>": 5378, "divisional</w>": 40912, "divisions</w>": 33715, "divor": 13543, "divorce</w>": 17060, "divorced</w>": 39437, "divya</w>": 47767, "diwali</w>": 18218, "dix": 45838, "dix</w>": 27620, "dixie</w>": 24484, "dixit</w>": 28279, "dixon</w>": 16086, "diy": 28472, "diy</w>": 7845, "diya</w>": 36459, "diz</w>": 32740, 
"dized</w>": 36232, "dizz": 40239, "dizzy</w>": 35464, "dj": 3761, "dj</w>": 3723, "djan": 35338, "django</w>": 46498, "dji": 35284, "dji</w>": 28379, "djing</w>": 36113, "djo": 19432, "djoker": 42721, "djokernole</w>": 42830, "djokovic</w>": 27944, "djs</w>": 18117, "dk": 20702, "dk</w>": 16196, "dl": 12558, "dl</w>": 9373, "dlc</w>": 19079, "dle": 11057, "dle</w>": 3287, "dled</w>": 23494, "dler</w>": 40279, "dles</w>": 7890, "dless</w>": 14997, "dley</w>": 12808, "dling</w>": 18221, "dly</w>": 3069, "dm": 19070, "dm</w>": 4667, "dma</w>": 42903, "dman</w>": 18826, "dmc</w>": 28991, "dmit": 31607, "dmitry</w>": 48326, "dms</w>": 19955, "dmv</w>": 27508, "dmx</w>": 45255, "dn": 11552, "dn</w>": 7459, "dna</w>": 8790, "dnb</w>": 35422, "dnc</w>": 20237, "dnd</w>": 11678, "dnr</w>": 37051, "dns</w>": 39245, "dnt</w>": 26795, "do": 639, "do</w>": 818, "doa</w>": 48332, "dob": 29640, "doba</w>": 35605, "dobbs</w>": 43006, "dobson</w>": 46888, "doc": 3009, "doc</w>": 7251, "doch</w>": 25101, "dock": 17311, "dock</w>": 8997, "docked</w>": 46784, "docker</w>": 31152, "docking</w>": 40845, "docks</w>": 24091, "docs</w>": 15157, "doctor": 7872, "doctor</w>": 5547, "doctoral</w>": 23649, "doctorate</w>": 39134, "doctors</w>": 9705, "doctorwho</w>": 12996, "doctr": 28497, "doctrine</w>": 35612, "docu": 4433, "document": 29293, "document</w>": 15121, "documentaries</w>": 44209, "documentary</w>": 7881, "documentation</w>": 31560, "documented</w>": 22310, "documenting</w>": 37876, "documents</w>": 14105, "dod": 13847, "dod</w>": 30187, "dodd</w>": 36748, "dodge": 31263, "dodge</w>": 12093, "dodgeball</w>": 43244, "dodger</w>": 31641, "dodgers</w>": 12422, "dodgy</w>": 37727, "doe</w>": 13296, "does": 2397, "does</w>": 1897, "doesn</w>": 2503, "doesnt</w>": 17937, "dof": 8277, "doff</w>": 20193, "dofficial</w>": 42516, "dog": 4326, "dog</w>": 1929, "dogcelebration</w>": 41819, "dogday</w>": 27475, "doge</w>": 42187, "dogg</w>": 20749, "doggie</w>": 32237, "doggo</w>": 42155, "doggy</w>": 26359, "doglo": 40733, "dogre": 40030, "dogrescue</w>": 44158, "dogs": 42182, "dogs</w>": 3255, "dogsoftwitter</w>": 19415, "doh</w>": 23581, "doha</w>": 20908, "doherty</w>": 31774, "doi</w>": 36361, "doin</w>": 15412, "doing": 37408, "doing</w>": 1960, "doit": 32272, "doit</w>": 28109, "doj</w>": 25700, "dojo</w>": 35901, "dok": 40547, "dok</w>": 41034, "doka</w>": 46528, "dol": 2287, "dol</w>": 19170, "dola</w>": 38005, "dolan</w>": 27200, "dolby</w>": 42414, "dolce": 30033, "dolce</w>": 30661, "dole</w>": 41040, "doll": 27031, "doll</w>": 9286, "dollar": 35092, "dollar</w>": 7474, "dollars</w>": 10669, "dolls</w>": 15090, "dolly": 43281, "dolly</w>": 23821, "dolom": 37137, "dolores</w>": 40741, "dolph": 8900, "dolph</w>": 22257, "dolphin": 42963, "dolphin</w>": 16464, "dolphins</w>": 14002, "dom": 2164, "dom</w>": 1919, "domain</w>": 15492, "domaine</w>": 48744, "domains</w>": 36358, "dome": 8515, "dome</w>": 9827, "domen": 37584, "domest": 21936, "domestic": 28189, "domestic</w>": 9043, "domin": 4361, "dominance</w>": 30546, "dominant</w>": 20565, "dominate</w>": 21431, "dominated</w>": 23048, "dominates</w>": 34043, "dominating</w>": 29303, "domination</w>": 30919, "domingo</w>": 24882, "dominic": 39007, "dominic</w>": 19095, "dominican</w>": 22934, "dominion</w>": 27155, "domino</w>": 30752, "dominos</w>": 39770, "domo</w>": 44293, "doms</w>": 30126, "don": 1067, "don</w>": 847, "dona</w>": 26789, "donal": 42375, "donald": 5990, "donald</w>": 4335, "donaldson</w>": 37783, "donaldtrump</w>": 6652, "donat": 36384, 
"donate</w>": 6429, "donated</w>": 8705, "donates</w>": 26960, "donating</w>": 12621, "donation</w>": 7924, "donations</w>": 9928, "doncaster": 38008, "doncaster</w>": 25352, "doncasterisgreat</w>": 47333, "done": 5136, "done</w>": 1700, "donegal</w>": 24172, "donesia</w>": 41281, "donet": 33724, "donetsk</w>": 33999, "dong": 26242, "dong</w>": 31478, "dongha": 28365, "donghae</w>": 28945, "donia</w>": 24014, "donkey</w>": 21415, "donkeys</w>": 44644, "donna</w>": 9158, "donne": 30897, "donnein": 38308, "donneinarte</w>": 40193, "donnell</w>": 35118, "donnelly</w>": 39070, "donnie": 47058, "donnie</w>": 30609, "donny": 37291, "donny</w>": 32887, "dono": 14840, "donor</w>": 18013, "donors</w>": 17887, "donovan</w>": 21499, "dons</w>": 22127, "dont": 8094, "dont</w>": 4632, "donut</w>": 18471, "donuts</w>": 13970, "doo": 4543, "doo</w>": 11643, "doodle</w>": 9388, "doodled</w>": 41030, "doodles</w>": 22156, "doodling</w>": 37548, "dooley</w>": 47609, "doom": 23263, "doom</w>": 14344, "doomed</w>": 33251, "doomsday</w>": 41791, "doon</w>": 36612, "doop</w>": 33886, "door": 7188, "door</w>": 2489, "doors</w>": 4228, "doorstep</w>": 19533, "doorway</w>": 46575, "dop": 42381, "dop</w>": 31722, "dope": 42587, "dope</w>": 10094, "doping</w>": 30285, "dopp": 21774, "doppelg": 45216, "doppler</w>": 42540, "dor": 2766, "dor</w>": 8695, "dora</w>": 18104, "dorado</w>": 32350, "dorchester</w>": 32656, "dore</w>": 39423, "dores</w>": 34323, "dorf</w>": 17296, "dori</w>": 49270, "doria</w>": 43186, "dorian</w>": 44016, "doris</w>": 24285, "dork</w>": 36206, "dorm</w>": 24263, "doro": 15498, "doro</w>": 37389, "dorothy</w>": 20805, "dors</w>": 31240, "dorset": 42109, "dorset</w>": 16047, "dorsey</w>": 41607, "dortmund</w>": 24290, "dory</w>": 36135, "dos": 44258, "dos</w>": 5474, "dose</w>": 11497, "doses</w>": 37873, "dossier</w>": 46042, "dost</w>": 44222, "dot": 7473, "dot</w>": 7004, "dota</w>": 23085, "dotcom</w>": 12443, "dote</w>": 31202, "dothis</w>": 47864, "dotnet</w>": 43124, "dotorg</w>": 46587, "dots</w>": 19019, "dotted</w>": 47950, "dou": 1756, "dou</w>": 23608, "doub": 19631, "double": 13013, "double</w>": 3200, "doubled</w>": 24948, "doubleheader</w>": 34668, "doubles</w>": 12539, "doubling</w>": 36850, "doubt": 37071, "doubt</w>": 8671, "doubts</w>": 30894, "douche</w>": 44292, "doug": 20271, "doug</w>": 10758, "dough": 15785, "dough</w>": 14983, "doughnut</w>": 32555, "doughnuts</w>": 31124, "dougie</w>": 46317, "dougla": 9140, "douglas</w>": 10065, "douglass</w>": 45692, "doun</w>": 44785, "dov</w>": 38856, "dova</w>": 26551, "dove": 27511, "dove</w>": 18281, "dover": 43019, "dover</w>": 14683, "doves</w>": 47067, "dow": 8022, "dow</w>": 10688, "dowell</w>": 27344, "down": 1833, "down</w>": 1136, "downe</w>": 46501, "downed</w>": 35814, "downer</w>": 42522, "downers</w>": 43739, "downey</w>": 29429, "downfall</w>": 48702, "downhill</w>": 27387, "downing</w>": 28140, "download": 35076, "download</w>": 3794, "downloadable</w>": 49105, "downloaded</w>": 22961, "downloading</w>": 30519, "downloads</w>": 26481, "downpour</w>": 39034, "downpours</w>": 40160, "downs</w>": 10706, "downside</w>": 41937, "downstairs</w>": 28174, "downstream</w>": 43822, "downtime</w>": 41964, "downton": 45023, "downton</w>": 42668, "downtown": 18230, "downtown</w>": 5061, "downward</w>": 37430, "dowski</w>": 43556, "dox": 44786, "dox</w>": 14510, "doyle</w>": 17728, "doyou": 27256, "doz</w>": 31106, "dozen</w>": 16401, "dozens</w>": 17883, "dp": 23820, "dp</w>": 6465, "dprint</w>": 46644, "dprinting</w>": 16194, 
"dprk</w>": 47920, "dps</w>": 34288, "dq</w>": 28741, "dr": 1084, "dr</w>": 1701, "dra": 1114, "dra</w>": 7402, "drac": 20168, "dracing</w>": 41253, "dracula</w>": 25405, "draf": 37426, "draft": 30624, "draft</w>": 5198, "drafted</w>": 19129, "drafting</w>": 33528, "drafts</w>": 29194, "drag": 8452, "drag</w>": 12463, "dragged</w>": 27884, "dragging</w>": 37069, "dragon": 9187, "dragon</w>": 5471, "dragonball": 40959, "dragoncon</w>": 47802, "dragonfly</w>": 32824, "dragons</w>": 10203, "dragrace</w>": 40762, "drags</w>": 45368, "drain": 23347, "drain</w>": 19467, "drainage</w>": 25953, "drained</w>": 44630, "drains</w>": 43638, "drainthe": 47337, "drake": 32504, "drake</w>": 8958, "dral</w>": 7503, "dram": 6937, "dram</w>": 32170, "drama</w>": 5055, "dramas</w>": 33467, "dramati": 43512, "dramatic</w>": 11240, "dramatically</w>": 24495, "drank</w>": 21712, "draped</w>": 49113, "drastic</w>": 43159, "drastically</w>": 35478, "drau": 18621, "draw": 17675, "draw</w>": 4001, "drawer</w>": 23219, "drawers</w>": 38975, "drawing": 36996, "drawing</w>": 3610, "drawings</w>": 13397, "drawn</w>": 8893, "draws</w>": 12043, "dray": 25562, "drayton</w>": 49044, "drc</w>": 21434, "dre": 960, "dre</w>": 14584, "dread": 17412, "dread</w>": 31403, "dreaded</w>": 47227, "dreadful</w>": 35846, "dreality</w>": 48367, "dream": 4595, "dream</w>": 2984, "dreambig</w>": 46495, "dreamcast</w>": 47226, "dreamed</w>": 27984, "dreamer</w>": 25692, "dreamers</w>": 27194, "dreaming</w>": 11662, "dreamliner</w>": 49143, "dreams</w>": 4405, "dreamt</w>": 43743, "dreamteam</w>": 40090, "dreamy</w>": 23517, "dred</w>": 10903, "dredge</w>": 48783, "dren": 29068, "dren</w>": 47309, "drenched</w>": 46378, "dres": 48852, "dres</w>": 44697, "dresden</w>": 34836, "dress": 12622, "dress</w>": 2595, "dressage</w>": 36144, "dressed</w>": 6559, "dresser</w>": 26346, "dresses</w>": 8184, "dressing</w>": 6348, "drew": 18792, "drew</w>": 5281, "drex": 33985, "drey": 48271, "dri": 1203, "dri</w>": 28833, "drian</w>": 36870, "dribb": 42153, "dric</w>": 23448, "dridge</w>": 22956, "drie</w>": 40170, "dried</w>": 16037, "drier</w>": 39877, "dries</w>": 33857, "drif": 33585, "drift</w>": 18194, "drifting</w>": 30276, "drill</w>": 11626, "drilled</w>": 46338, "drilling</w>": 18634, "drills</w>": 24378, "drin": 3375, "drin</w>": 47133, "drink": 14131, "drink</w>": 3979, "drinking</w>": 5778, "drinklocal</w>": 45998, "drinks</w>": 6732, "drip</w>": 24050, "dripping</w>": 38787, "dris</w>": 35804, "drive": 11402, "drive</w>": 2620, "driven</w>": 9314, "driver": 27563, "driver</w>": 4383, "driverless</w>": 46769, "drivers</w>": 7384, "drives</w>": 11441, "driveway</w>": 26273, "driving": 37800, "driving</w>": 4161, "drizzle</w>": 28240, "drm</w>": 39674, "dro": 1494, "dro</w>": 12442, "drogba</w>": 49199, "droid</w>": 38016, "drome</w>": 9157, "dron": 43898, "dron</w>": 23360, "drone": 33557, "drone</w>": 9397, "drones</w>": 14006, "droo": 30715, "drool</w>": 41554, "drooling</w>": 44360, "drop": 16407, "drop</w>": 3387, "dropbox</w>": 47216, "dropped</w>": 6792, "dropping</w>": 8339, "drops</w>": 6437, "dros</w>": 47033, "drou": 38558, "drought</w>": 13935, "drove</w>": 13753, "drow": 21159, "drown</w>": 28571, "drowned</w>": 34005, "drowning</w>": 24618, "drs</w>": 21257, "dru": 2275, "dru</w>": 49048, "drug": 20601, "drug</w>": 5600, "drugs</w>": 8021, "druid</w>": 40297, "drum": 13353, "drum</w>": 8698, "drummer</w>": 13618, "drummers</w>": 46191, "drumming</w>": 35480, "drummond</w>": 42213, "drums</w>": 11690, "drun": 15488, "drunk": 
37398, "drunk</w>": 8232, "drunken</w>": 28196, "drupal</w>": 46481, "drush</w>": 43009, "drwho</w>": 48342, "dry": 13544, "dry</w>": 4501, "dryer</w>": 24425, "drying</w>": 23203, "ds": 3361, "ds</w>": 646, "dsa</w>": 47607, "dsb": 47168, "dsb</w>": 14257, "dsburg</w>": 47237, "dsc</w>": 37240, "dsd</w>": 45383, "dsley</w>": 40740, "dslr</w>": 33740, "dsm</w>": 39502, "dson</w>": 40310, "dsp</w>": 45291, "dss</w>": 41580, "dstv</w>": 35027, "dt": 13104, "dt</w>": 7427, "dthe": 13863, "dtla</w>": 31885, "dtm</w>": 42407, "dts</w>": 46233, "du": 691, "du</w>": 3686, "dua</w>": 25244, "dual": 39739, "dual</w>": 5347, "duane</w>": 38946, "dub": 14526, "dub</w>": 13144, "duba": 5485, "dubai": 32599, "dubai</w>": 5985, "dubbed</w>": 27740, "dublin": 20707, "dublin</w>": 6145, "dubnation</w>": 47329, "dubois</w>": 48046, "dubrov": 46709, "dubrovnik</w>": 48724, "dubs</w>": 27013, "dubstep</w>": 38303, "dubu": 43257, "duc": 979, "duc</w>": 36446, "ducati</w>": 28570, "ducation</w>": 17197, "duce</w>": 3660, "duchess</w>": 21713, "duck": 12708, "duck</w>": 6910, "ducks</w>": 11202, "duct</w>": 26829, "dude": 48087, "dude</w>": 5710, "dudes</w>": 14449, "dudley</w>": 27324, "due</w>": 2887, "duel</w>": 27143, "dues</w>": 37646, "duet</w>": 25457, "duf": 38713, "duff": 38071, "duff</w>": 21934, "duffy</w>": 23599, "dug": 22743, "dug</w>": 21000, "dugg": 40523, "duggan</w>": 46169, "dugout</w>": 36831, "duh</w>": 26716, "dui</w>": 29693, "duk</w>": 14160, "duke": 18402, "duke</w>": 7732, "dukes</w>": 27914, "dul": 6738, "dulce</w>": 44872, "dulil": 32565, "dulkar</w>": 47980, "dull</w>": 19433, "dulu": 28865, "duluth</w>": 32109, "dulwich</w>": 47343, "dum": 13400, "dum</w>": 11564, "dumb": 15901, "dumb</w>": 12464, "dumbass</w>": 38980, "dummies</w>": 40899, "dummy</w>": 34246, "dump": 12655, "dump</w>": 17146, "dumped</w>": 23768, "dumping</w>": 31707, "dumplings</w>": 35495, "dumps</w>": 45804, "dumpster</w>": 45467, "dun": 2616, "dun</w>": 18284, "dunbar</w>": 41453, "duncan": 31084, "duncan</w>": 13502, "dundal": 38185, "dundas</w>": 39300, "dundee</w>": 18619, "dune": 32833, "dune</w>": 28208, "dunedin</w>": 40121, "dunes</w>": 23526, "dung</w>": 33712, "dungeon": 28812, "dungeon</w>": 22931, "dungeons</w>": 42572, "dungeonsand": 34970, "dungeonsanddragons</w>": 35497, "dunham</w>": 42501, "duni</w>": 43454, "dunk</w>": 17222, "dunkin": 48022, "dunkin</w>": 36415, "dunkirk</w>": 46928, "dunks</w>": 48977, "dunlop</w>": 34753, "dunn</w>": 19185, "dunne</w>": 38538, "dunno</w>": 24502, "duo</w>": 8696, "dup": 36805, "dup</w>": 10445, "duper</w>": 44850, "duplex</w>": 41186, "duplic": 28992, "dupont</w>": 35994, "dur": 4355, "dur</w>": 23230, "dura": 28173, "dura</w>": 47382, "durability</w>": 43671, "durable</w>": 22285, "duran</w>": 28185, "durango</w>": 44443, "durant</w>": 24861, "duras</w>": 27518, "duration</w>": 31663, "durban</w>": 24474, "dure</w>": 19108, "durga</w>": 38456, "durham": 26765, "durham</w>": 14335, "during</w>": 1590, "dus</w>": 9931, "dusa</w>": 28546, "dusk</w>": 19708, "dust": 29723, "dust</w>": 8349, "dusted</w>": 38274, "duster</w>": 46280, "dustin": 42423, "dustin</w>": 21235, "dusting</w>": 41756, "dusty</w>": 22029, "dut": 32625, "dutch": 22277, "dutch</w>": 7991, "duter": 21624, "duterte</w>": 22371, "duties</w>": 19603, "dutt</w>": 30081, "dutton</w>": 42771, "duty</w>": 6458, "duval</w>": 42459, "duvet</w>": 48006, "dux</w>": 28562, "dv": 4288, "dv</w>": 26265, "dvd</w>": 7170, "dvds</w>": 36655, "dvn</w>": 29811, "dvr</w>": 29210, "dw": 8455, "dw</w>": 19997, 
"dwar": 13487, "dwarf</w>": 22643, "dwayne</w>": 31395, "dwell": 27549, "dwell</w>": 18755, "dwelling</w>": 37098, "dwight</w>": 22473, "dwp</w>": 46976, "dwts</w>": 30220, "dwyer</w>": 43878, "dx": 22717, "dx</w>": 15679, "dy": 1444, "dy</w>": 907, "dyce</w>": 48325, "dye": 37159, "dye</w>": 15997, "dyed</w>": 24906, "dyer</w>": 29495, "dyes</w>": 39874, "dying</w>": 5115, "dyk</w>": 12142, "dyke</w>": 32632, "dylan": 21004, "dylan</w>": 9900, "dyn": 44289, "dyn</w>": 30669, "dynam": 5735, "dynamic</w>": 10057, "dynamics</w>": 14329, "dynamite</w>": 29003, "dynamo</w>": 28281, "dynasty</w>": 14593, "dyne</w>": 42756, "dyou": 11484, "dyour": 22525, "dys": 11022, "dys</w>": 38384, "dysfunction</w>": 36865, "dysfunctional</w>": 40757, "dysle": 33681, "dyslexia</w>": 43199, "dyson</w>": 34475, "dyssey</w>": 17435, "dystop": 28276, "dystopian</w>": 38915, "dz": 24421, "dz</w>": 22913, "dé": 25466, "dü": 46948, "dÃŃ": 46988, "e": 68, "e</w>": 324, "ea": 2150, "ea</w>": 8100, "eable</w>": 20693, "each": 31442, "each</w>": 2416, "eachother</w>": 40792, "ead": 42556, "ead</w>": 45523, "eae</w>": 27446, "eag": 3743, "eager</w>": 21551, "eagerly</w>": 30094, "eagle": 20207, "eagle</w>": 7517, "eagles</w>": 6920, "eal": 48872, "ealing</w>": 40484, "eames</w>": 49072, "eamon": 45954, "ean</w>": 13327, "ear": 1055, "ear</w>": 8373, "earbuds</w>": 47807, "eared</w>": 9127, "earl": 30573, "earl</w>": 14235, "earle</w>": 40292, "earlier</w>": 4297, "earliest</w>": 22097, "early": 15840, "early</w>": 2090, "earn": 33977, "earn</w>": 8465, "earned</w>": 8898, "earnest</w>": 45422, "earning</w>": 14550, "earnings</w>": 15912, "earns</w>": 16760, "earp</w>": 35296, "earphones</w>": 44905, "earring</w>": 28664, "earrings</w>": 9136, "ears</w>": 9861, "eart": 7086, "earth": 5184, "earth</w>": 3475, "earthand": 34229, "earthandclouds</w>": 34480, "earthday</w>": 19481, "earthquake</w>": 10060, "earthquakes</w>": 32895, "earthy</w>": 47139, "earts</w>": 38824, "eas": 5740, "ease</w>": 13574, "easier</w>": 8817, "easiest</w>": 26314, "easily</w>": 8197, "easing</w>": 44825, "easport": 42251, "east": 5022, "east</w>": 2602, "eastbound</w>": 28827, "eastbourne</w>": 38455, "eastenders</w>": 23545, "easter": 14783, "easter</w>": 4811, "eastern": 34522, "eastern</w>": 6311, "eastman</w>": 48280, "easton</w>": 29619, "eastside</w>": 42650, "eastwood</w>": 28270, "easy": 18308, "easy</w>": 3176, "eat": 5418, "eat</w>": 3384, "eaten</w>": 16750, "eater</w>": 24060, "eaters</w>": 37645, "eatery</w>": 46559, "eating</w>": 4371, "eatlocal</w>": 42868, "eaton</w>": 28462, "eats</w>": 13188, "eau</w>": 17608, "eazy</w>": 36536, "eb": 12283, "eb</w>": 8677, "eba</w>": 40889, "ebay": 34412, "ebay</w>": 4099, "eber": 34020, "ebo</w>": 46635, "ebola</w>": 15864, "ebon": 22013, "ebony</w>": 30651, "ebook</w>": 13122, "ebooks</w>": 25774, "ec": 747, "ec</w>": 10879, "eca</w>": 18465, "ecar": 34500, "ecb</w>": 26205, "ecc</w>": 33128, "eccc</w>": 47401, "eccentric</w>": 43228, "eccle": 27494, "ece</w>": 2163, "eces</w>": 5905, "ecg</w>": 45983, "ech": 15797, "ech</w>": 31147, "echel": 41233, "echo": 17366, "echo</w>": 13989, "echoes</w>": 32564, "eci": 31936, "eck": 25866, "eck</w>": 15969, "ecker": 39661, "ecker</w>": 40890, "ecla": 47806, "eclec": 25114, "eclectic</w>": 28382, "eclip": 30841, "eclipse</w>": 11505, "eclub</w>": 38983, "eco": 5106, "eco</w>": 10077, "ecofriendly</w>": 43412, "ecol": 22706, "ecological</w>": 25127, "ecology</w>": 18578, "ecommerce</w>": 15529, "econ": 26755, "econ</w>": 21158, "econom": 2768, 
"economic": 36649, "economic</w>": 5259, "economical</w>": 48782, "economically</w>": 39406, "economics</w>": 12625, "economies</w>": 27136, "economist</w>": 18836, "economists</w>": 43701, "economy</w>": 5644, "ecor": 28962, "ecosystem</w>": 15788, "ecosystems</w>": 28725, "ecoun": 27924, "ecr</w>": 48572, "ecraft</w>": 11439, "ecs</w>": 23485, "ecstasy</w>": 47286, "ecstatic</w>": 36244, "ect</w>": 25168, "ecu": 13087, "ecu</w>": 32919, "ecuador</w>": 19813, "ecz": 43530, "ed": 843, "ed</w>": 538, "eda</w>": 10804, "edad</w>": 44724, "eday": 39258, "edc</w>": 21245, "edchat</w>": 14702, "edd</w>": 35431, "eddi": 42930, "eddie": 22748, "eddie</w>": 9517, "eddy</w>": 25959, "ede": 29632, "eded</w>": 19555, "edel": 20460, "edelman</w>": 48139, "eden": 23621, "eden</w>": 13741, "eder</w>": 16249, "edes</w>": 36247, "edfringe</w>": 27402, "edg": 35955, "edgar": 33543, "edgar</w>": 17914, "edge": 16914, "edge</w>": 5461, "edged</w>": 39188, "edges</w>": 20938, "edgy</w>": 35393, "edi": 8750, "edi</w>": 27148, "edible</w>": 19795, "edic": 25184, "edics</w>": 30641, "edin": 6524, "edinburgh": 27574, "edinburgh</w>": 8068, "eding</w>": 5742, "edison</w>": 25846, "edit": 8239, "edit</w>": 8013, "edited</w>": 13945, "edith</w>": 28597, "editing</w>": 10178, "edition</w>": 3062, "editions</w>": 21664, "editor</w>": 7661, "editorial</w>": 12325, "editors</w>": 19486, "edits</w>": 24945, "edm": 37843, "edm</w>": 13539, "edmon": 11275, "edmond</w>": 41581, "edmonds</w>": 46520, "edmonton": 37311, "edmonton</w>": 15058, "edmun": 36561, "edmund</w>": 27567, "edna</w>": 39002, "edo": 29145, "edo</w>": 18096, "edon</w>": 41467, "edor": 30184, "edou": 47678, "edp</w>": 46066, "eds</w>": 1941, "edsheeran</w>": 30386, "edt</w>": 15071, "edtech": 41825, "edtech</w>": 15262, "edu": 11757, "edu</w>": 11799, "eduardo</w>": 30604, "educ": 2200, "educate</w>": 17563, "educated</w>": 21447, "education": 22358, "education</w>": 2806, "educational</w>": 10400, "educator</w>": 19875, "educators</w>": 15420, "edwar": 27586, "edward": 26184, "edward</w>": 7450, "edwards</w>": 12627, "edwin": 48718, "edwin</w>": 22471, "edy": 17072, "edy</w>": 4144, "ee": 2644, "ee</w>": 4708, "eed</w>": 17513, "eee": 24632, "eee</w>": 9361, "eeee": 11696, "eeee</w>": 17570, "eeeee</w>": 26938, "eeeeee</w>": 41407, "eek</w>": 46591, "eel</w>": 27462, "eels</w>": 44416, "eem</w>": 27236, "een": 47490, "een</w>": 21230, "eer": 35409, "eer</w>": 31846, "eera</w>": 36664, "eerie</w>": 33846, "ees</w>": 40308, "eet</w>": 48935, "eez</w>": 39033, "ef": 1490, "ef</w>": 1829, "efa</w>": 16999, "eface</w>": 48804, "efan": 33556, "efc</w>": 22065, "efcc</w>": 46087, "efer</w>": 26199, "eff": 20548, "eff</w>": 21715, "effe": 2808, "effec": 3943, "effect</w>": 5436, "effective</w>": 6837, "effectively</w>": 17516, "effectiveness</w>": 26847, "effects</w>": 7331, "effic": 36004, "efficacy</w>": 39937, "effici": 6670, "efficiency</w>": 11823, "efficient</w>": 11334, "efficiently</w>": 32915, "effor": 6356, "effort": 40078, "effort</w>": 6255, "effortless</w>": 41639, "effortlessly</w>": 42320, "efforts</w>": 6847, "efish</w>": 35813, "efl</w>": 27172, "efron</w>": 48111, "efs</w>": 7389, "eg": 8053, "eg</w>": 14599, "ega</w>": 41193, "egan</w>": 42943, "eger": 46704, "eger</w>": 22767, "egg": 13778, "egg</w>": 5911, "eggplant</w>": 34906, "eggs</w>": 7099, "ego": 34712, "ego</w>": 14250, "egos</w>": 43992, "egre": 27044, "egret</w>": 42002, "egy": 5224, "egyp": 10250, "egypt</w>": 7267, "egyptian</w>": 12428, "eh": 9277, "eh</w>": 9135, "eha</w>": 
48563, "ehealth</w>": 48617, "ehr": 45271, "ehs</w>": 44648, "ei": 4006, "ei</w>": 18264, "eic": 40251, "eid": 28038, "eid</w>": 13979, "eidmubarak</w>": 46275, "eiffel</w>": 29720, "eigh": 13468, "eight</w>": 7910, "eighteen</w>": 49316, "eighth</w>": 21237, "eighty</w>": 47449, "eil": 29457, "eileen</w>": 31468, "ein": 29944, "ein</w>": 24524, "eindhoven</w>": 47172, "eing</w>": 7702, "einstein</w>": 20587, "eira</w>": 47708, "eis": 13802, "eisen": 25273, "eisenhower</w>": 35562, "either</w>": 6036, "ej": 19887, "ej</w>": 25009, "ejec": 29771, "ek": 4212, "ek</w>": 2092, "el": 544, "el</w>": 832, "ela": 11284, "ela</w>": 3787, "elab</w>": 38866, "elabor": 26034, "elaborate</w>": 33855, "elaine</w>": 22523, "elan": 17763, "elan</w>": 18399, "eland": 24930, "eland</w>": 6275, "elas</w>": 41078, "elast": 27479, "elastic</w>": 30282, "elba</w>": 48598, "elbow</w>": 21965, "eld</w>": 5684, "elder": 11791, "elder</w>": 14416, "elderly</w>": 15455, "elders</w>": 28617, "eldest</w>": 33503, "elding</w>": 28223, "elds</w>": 13466, "ele": 2084, "ele</w>": 9766, "eleague</w>": 36577, "eleanor</w>": 18604, "elearning</w>": 29969, "elec": 1564, "elec</w>": 38768, "elect</w>": 15336, "elected</w>": 8828, "election": 19312, "election</w>": 4247, "electionday</w>": 40540, "elections</w>": 6949, "elector": 16465, "electoral</w>": 19544, "electr": 3654, "electra</w>": 48959, "electri": 23927, "electric": 19547, "electric</w>": 5031, "electrical</w>": 12176, "electrician</w>": 46422, "electricity</w>": 10950, "electrifying</w>": 48843, "electro": 11648, "electro</w>": 23244, "electromagnetic</w>": 46530, "electron</w>": 33396, "electronic": 33865, "electronic</w>": 9273, "electronica</w>": 43119, "electronics</w>": 13081, "eled</w>": 20357, "elee</w>": 44112, "eleg": 8075, "elegance</w>": 19146, "elegant</w>": 11124, "elek": 34559, "elem</w>": 25406, "element</w>": 14909, "elementary</w>": 8143, "elements</w>": 10925, "elen": 30654, "elen</w>": 39164, "elena</w>": 19421, "eleng": 48180, "eleph": 7554, "elephant</w>": 10299, "elephants</w>": 16871, "eler</w>": 24646, "eless": 15244, "eless</w>": 30837, "elets</w>": 19400, "elev": 7921, "elevate</w>": 26736, "elevated</w>": 23967, "elevation</w>": 23826, "elevator</w>": 19021, "eleven": 31617, "eleven</w>": 17795, "elf": 45961, "elf</w>": 11924, "elfie</w>": 39955, "elg": 28790, "elgin</w>": 31868, "eli": 1018, "eli</w>": 6292, "elia</w>": 10956, "elian</w>": 42508, "elias": 47274, "elias</w>": 29902, "elic": 34743, "elic</w>": 13492, "elie": 38677, "elie</w>": 26501, "elier</w>": 14634, "elife": 37429, "elife</w>": 12719, "eligibility</w>": 34937, "eligible</w>": 16978, "elijah</w>": 26065, "elike": 48913, "elim": 9296, "elimin": 11386, "eliminate</w>": 19655, "eliminated</w>": 29075, "eliminating</w>": 36619, "elimination</w>": 24176, "elin": 25353, "elin</w>": 13458, "eline": 46199, "eline</w>": 7153, "eling</w>": 9990, "elio</w>": 47943, "elion</w>": 30682, "elions</w>": 44159, "eliot</w>": 33326, "elis": 23411, "elis</w>": 48021, "elisa": 25610, "elisa</w>": 44051, "elisabeth</w>": 33127, "elise</w>": 27124, "elit": 40882, "elite": 32277, "elite</w>": 6553, "elited": 43943, "elitedangerous</w>": 47138, "elites</w>": 35975, "elius</w>": 35623, "elive": 49338, "elive</w>": 23505, "elives</w>": 49174, "elix": 32926, "elixir</w>": 42887, "eliz": 42844, "eliza": 6132, "eliza</w>": 29992, "elizabeth": 22397, "elizabeth</w>": 7026, "elk": 34013, "elk</w>": 21896, "ell": 826, "ell</w>": 812, "ella": 20692, "ella</w>": 2957, "elland</w>": 43326, "ellar</w>": 
38443, "ellas</w>": 37053, "elle": 12818, "elle</w>": 4765, "elled</w>": 13146, "ellen": 14007, "ellen</w>": 12312, "ellenshow</w>": 34812, "eller": 20927, "eller</w>": 4465, "ellers</w>": 19010, "elles</w>": 24431, "elli": 3367, "elli</w>": 6673, "ellic": 38905, "ellie</w>": 16769, "ellier</w>": 44054, "ellin</w>": 40374, "elling</w>": 2220, "ellington</w>": 34477, "ellini</w>": 43256, "elliot</w>": 20761, "elliott": 44456, "elliott</w>": 13788, "ellip": 44816, "ellis</w>": 11553, "ellison</w>": 32295, "ello</w>": 2512, "ellor</w>": 14594, "ells</w>": 2433, "ellu": 35560, "elly": 8041, "elly</w>": 20355, "elm": 25199, "elm</w>": 22082, "elman</w>": 33622, "elmer</w>": 45958, "elmo</w>": 32150, "elo": 6170, "elo</w>": 13490, "elon": 26381, "elon</w>": 20406, "elondon</w>": 47377, "elong": 44363, "elonmusk</w>": 37076, "elope</w>": 23367, "eloqu": 37795, "elos</w>": 44733, "elot</w>": 43490, "elove": 43319, "elove</w>": 19165, "elover</w>": 21732, "elovers</w>": 33946, "els": 35958, "els</w>": 1645, "elsa</w>": 22050, "else": 18857, "else</w>": 3344, "elsewhere</w>": 22906, "elson</w>": 19624, "elt</w>": 18692, "elton</w>": 20758, "elu": 14208, "elusive</w>": 28903, "elves</w>": 29111, "elvi": 47008, "elvis": 47359, "elvis</w>": 14498, "elxn</w>": 37726, "ely": 12189, "ely</w>": 1273, "elyn": 29691, "elyn</w>": 18126, "em": 908, "em</w>": 2270, "ema": 7002, "ema</w>": 11131, "emabiggest": 23101, "emabiggestfans</w>": 29587, "email": 33537, "email</w>": 4462, "emailed</w>": 40470, "emailmarketing</w>": 40188, "emails</w>": 12871, "eman": 24416, "eman</w>": 36868, "emancip": 42996, "emanuel</w>": 35232, "emb": 3692, "embar": 8266, "embaras": 48019, "embark</w>": 33953, "embarra": 11382, "embarrass": 27183, "embarrassed</w>": 28217, "embarrassing</w>": 19653, "embarrassment</w>": 41346, "embassy</w>": 13598, "embe": 46041, "embed": 19703, "embedded</w>": 22046, "embelli": 32144, "embellished</w>": 46992, "ember</w>": 47049, "emblem</w>": 21163, "embo": 23065, "embr": 35267, "embrac": 16928, "embrace</w>": 12118, "embraced</w>": 35739, "embraces</w>": 38404, "embracing</w>": 22196, "embro": 12550, "embroi": 18667, "embroide": 21530, "embroidered</w>": 22381, "embroidery</w>": 20823, "emc": 20897, "emc</w>": 31602, "emcee</w>": 42038, "eme": 22910, "eme</w>": 21548, "emea</w>": 40352, "emed</w>": 11028, "emen</w>": 22033, "ement": 40841, "ement</w>": 2057, "ements</w>": 11058, "emer": 3132, "emer</w>": 25727, "emerald": 46878, "emerald</w>": 16980, "emerge</w>": 22182, "emerged</w>": 26425, "emergen": 24096, "emergence</w>": 39867, "emergencies</w>": 35759, "emergency": 44038, "emergency</w>": 5897, "emerges</w>": 30801, "emerging": 38174, "emerging</w>": 11113, "emeritus</w>": 35333, "emerson</w>": 24147, "emery</w>": 32678, "emi": 44327, "emi</w>": 18525, "emil": 26794, "emil</w>": 40624, "emile</w>": 43926, "emili": 20709, "emilia</w>": 34238, "emilio</w>": 39722, "emily": 14545, "emily</w>": 7640, "emin": 17227, "emin</w>": 23995, "eminem</w>": 22129, "eminent</w>": 33779, "eming</w>": 40398, "emir": 13337, "emir</w>": 47613, "emirates": 47244, "emirates</w>": 17867, "emission</w>": 27761, "emissions</w>": 14172, "emit</w>": 49043, "emma": 18177, "emma</w>": 7445, "emmanuel": 48045, "emmanuel</w>": 20411, "emmett</w>": 45779, "emmy": 35625, "emmy</w>": 17089, "emmys</w>": 21875, "emo": 3738, "emo</w>": 19381, "emoji</w>": 16327, "emojis</w>": 27870, "emon</w>": 34406, "emor": 45034, "emory</w>": 44274, "emotion</w>": 17464, "emotional</w>": 7357, "emotionally</w>": 24088, "emotions</w>": 
12904, "emp": 3831, "emp</w>": 41004, "empathy</w>": 22420, "emper": 12522, "emperor</w>": 13828, "empha": 16237, "emphasi": 47176, "emphasis</w>": 29588, "empire": 26212, "empire</w>": 7614, "empires</w>": 46510, "emplo": 3409, "employ": 37290, "employ</w>": 39626, "employe": 5037, "employed</w>": 26567, "employee": 36631, "employee</w>": 9560, "employees</w>": 7377, "employer</w>": 21296, "employers</w>": 17647, "employment</w>": 10959, "empor": 27386, "emporium</w>": 48541, "empower": 13612, "empower</w>": 17230, "empowered</w>": 29087, "empowering</w>": 20086, "empowerment</w>": 15747, "empowers</w>": 46206, "empress</w>": 26656, "empty": 41203, "empty</w>": 7893, "emra": 39259, "ems</w>": 2858, "emt</w>": 46360, "emu": 48149, "emu</w>": 29296, "emul": 23272, "emy": 31076, "en": 524, "en</w>": 576, "ena</w>": 3452, "enab": 17308, "enable</w>": 15642, "enabled</w>": 23666, "enables</w>": 23417, "enabling</w>": 23590, "enam": 41486, "enamel</w>": 22746, "enary</w>": 13132, "enas</w>": 34536, "enation</w>": 20860, "enberg</w>": 15658, "enburg</w>": 28430, "enc</w>": 33169, "enca</w>": 37774, "encan": 30345, "encapsul": 40874, "ence": 6495, "ence</w>": 954, "enced</w>": 6549, "ences</w>": 3777, "enchan": 17290, "enchanted</w>": 28258, "enchanting</w>": 32531, "enchil": 47396, "enci": 32207, "encia</w>": 30068, "encies</w>": 18729, "encing</w>": 10326, "enclosed</w>": 43243, "enclosure</w>": 37419, "encom": 44026, "encore</w>": 20549, "encoun": 17309, "encounter</w>": 13164, "encountered</w>": 32492, "encounters</w>": 25399, "encoura": 6169, "encourage</w>": 12090, "encouraged</w>": 20299, "encouragement</w>": 24959, "encourages</w>": 23848, "encouraging</w>": 15875, "encro": 45822, "encry": 28600, "encryp": 42928, "encrypted</w>": 48710, "encryption</w>": 31423, "ency</w>": 3484, "encyclo": 32104, "encyclopedia</w>": 38376, "end": 945, "end</w>": 806, "enda</w>": 6735, "endale</w>": 20290, "endange": 13990, "endangered</w>": 14931, "ende": 11373, "ende</w>": 40306, "endeav": 18134, "endeavor</w>": 40502, "endeavors</w>": 44394, "endeavour</w>": 38035, "ended</w>": 2622, "endemic</w>": 41241, "endent</w>": 16265, "ender": 48106, "ender</w>": 12383, "enders</w>": 7418, "endez</w>": 43850, "endgame</w>": 23042, "endi</w>": 31359, "ending</w>": 2695, "endings</w>": 36516, "endish</w>": 38841, "endless</w>": 12688, "endlessly</w>": 45145, "endment</w>": 45894, "endo": 13476, "endo</w>": 15830, "endocr": 36486, "endof": 40786, "endome": 46996, "endon</w>": 48018, "endor": 8092, "endorf</w>": 37249, "endorse</w>": 28819, "endorsed</w>": 24307, "endorsement</w>": 21205, "endorses</w>": 34603, "endorsing</w>": 46779, "endow": 45895, "endra</w>": 22321, "ends</w>": 1339, "endthe": 46256, "endu": 26032, "endur": 19557, "endurance</w>": 21027, "endure</w>": 32419, "enduring</w>": 30851, "enduro</w>": 47042, "ene": 3297, "ene</w>": 6049, "ened</w>": 2494, "eneed": 45137, "enegger</w>": 33235, "enei</w>": 48906, "enemies</w>": 15824, "enemy</w>": 10310, "enen</w>": 45113, "ener": 2244, "ener</w>": 13600, "energ": 39451, "energetic</w>": 24197, "energi": 23044, "energies</w>": 42374, "energized</w>": 48635, "energy": 14974, "energy</w>": 2650, "energye": 32271, "energyefficiency</w>": 40586, "eners</w>": 48208, "enes</w>": 42066, "eness</w>": 11806, "enet</w>": 46336, "enew": 29672, "enews</w>": 13442, "eney</w>": 20706, "enez</w>": 33110, "enf": 38167, "enfield</w>": 27808, "enfor": 10592, "enforce</w>": 40224, "enforced</w>": 44597, "enforcement</w>": 12460, "eng": 1035, "eng</w>": 6730, "enga</w>": 
22297, "engag": 6793, "engage</w>": 11089, "engaged</w>": 11475, "engagement</w>": 7281, "engaging</w>": 13060, "enge": 26279, "enge</w>": 2742, "engel": 38265, "engen</w>": 48286, "enger</w>": 6618, "engers</w>": 7533, "engine": 3355, "engine</w>": 5857, "engineer": 40151, "engineer</w>": 8517, "engineered</w>": 26580, "engineering</w>": 5273, "engineers</w>": 11494, "engines</w>": 14487, "england": 20904, "england</w>": 3595, "english": 15942, "english</w>": 3469, "engra": 17560, "engraved</w>": 29421, "engraving</w>": 33309, "engul": 43655, "engv": 28401, "enh": 7449, "enhall</w>": 48781, "enham</w>": 24592, "enhan": 26827, "enhance</w>": 13993, "enhanced</w>": 16070, "enhancement</w>": 35601, "enhances</w>": 38259, "enhancing</w>": 25986, "eni": 4395, "eni</w>": 17538, "enic": 46780, "enic</w>": 28292, "enig": 19754, "enig</w>": 48730, "enight": 32848, "enight</w>": 20640, "enigma</w>": 34998, "ening</w>": 1133, "enium</w>": 34380, "enix</w>": 25720, "enjo": 1498, "enjoy": 12981, "enjoy</w>": 2218, "enjoyable</w>": 17444, "enjoyed</w>": 5045, "enjoying</w>": 3603, "enjoyment</w>": 34905, "enjoys</w>": 17024, "enka</w>": 43942, "enko</w>": 25312, "enlar": 38136, "enligh": 21364, "enlighten": 28200, "enlightened</w>": 44032, "enlightening</w>": 44005, "enlightenment</w>": 29255, "enlisted</w>": 43555, "enly</w>": 43023, "enn</w>": 43563, "enna</w>": 8095, "enne": 21176, "enne</w>": 11518, "ennedy</w>": 46266, "ennes</w>": 43613, "enni": 7049, "ennial</w>": 14220, "ennis": 48923, "ennis</w>": 26309, "eno": 9429, "eno</w>": 12843, "enoch</w>": 47917, "enor": 13955, "enormous</w>": 20129, "enos</w>": 44759, "enote</w>": 44955, "enough</w>": 2744, "enow</w>": 26876, "enqu": 28417, "enqui": 22810, "enquire</w>": 46658, "enquiries</w>": 31901, "enquiry</w>": 45141, "enri": 18915, "enrich": 20058, "enrich</w>": 45504, "enriched</w>": 45166, "enrichment</w>": 32903, "enrique</w>": 25489, "enrol": 44279, "enroll": 23739, "enroll</w>": 30366, "enrolled</w>": 36853, "enrollment</w>": 24875, "enroute</w>": 40548, "ens": 41799, "ens</w>": 1323, "ense": 12657, "ense</w>": 27658, "ensemble</w>": 14843, "ensis</w>": 32842, "ensla": 37535, "enslaved</w>": 48675, "ensure</w>": 7492, "ensures</w>": 29707, "ensuring</w>": 19403, "ent": 724, "ent</w>": 621, "enta</w>": 17681, "ental": 32342, "ental</w>": 6168, "entary</w>": 9833, "entation</w>": 37412, "ente": 17433, "ente</w>": 9935, "ented</w>": 3800, "entennial</w>": 43088, "enter": 2963, "enter</w>": 3819, "entered</w>": 10679, "entering</w>": 12580, "enterpri": 7339, "enterprise</w>": 9220, "enterprises</w>": 21219, "enters</w>": 15287, "entertain": 5566, "entertain</w>": 23510, "entertained</w>": 30631, "entertainer</w>": 28674, "entertaining</w>": 13897, "entertainment</w>": 6166, "entes</w>": 24213, "enthr": 36202, "enthusi": 9631, "enthusiasm</w>": 20525, "enthusiast</w>": 27153, "enthusiastic</w>": 22068, "enthusiasts</w>": 27514, "enti": 1938, "ential</w>": 5194, "entially</w>": 37695, "entic</w>": 10340, "entine</w>": 49212, "enting</w>": 20526, "entire</w>": 4709, "entirely</w>": 13911, "entirety</w>": 43242, "entit": 15209, "entities</w>": 38134, "entitled</w>": 18680, "entity</w>": 28455, "ently</w>": 2922, "ento": 21917, "ento</w>": 8762, "entom": 31676, "entourage</w>": 47893, "entr": 7129, "entrance</w>": 9129, "entrata</w>": 27304, "entre": 34188, "entre</w>": 19600, "entren": 46959, "entrepre": 4583, "entreprene": 4789, "entrepreneu": 26784, "entrepreneur": 12119, "entrepreneur</w>": 8033, "entrepreneurial</w>": 28261, 
"entrepreneurs</w>": 11054, "entrepreneurship</w>": 12858, "entries</w>": 13766, "entry</w>": 5362, "ents</w>": 870, "entu": 6650, "enty</w>": 5657, "enu": 23430, "env": 32280, "env</w>": 39207, "envel": 20052, "envelope</w>": 27358, "envir": 3512, "enviro</w>": 46200, "environ": 3599, "environment": 33039, "environment</w>": 5501, "environmental</w>": 7831, "environmentally</w>": 32855, "environments</w>": 19577, "envision</w>": 49031, "envoy</w>": 29263, "envy</w>": 21017, "eny</w>": 20482, "enya</w>": 36509, "enyc</w>": 39520, "enz": 25805, "enz</w>": 31873, "enza</w>": 25239, "enzie</w>": 14839, "enzo</w>": 31543, "enzyme</w>": 40348, "enzymes</w>": 47465, "eo": 16054, "eo</w>": 11712, "eoin</w>": 48634, "eon</w>": 31915, "eos</w>": 17805, "ep": 1178, "ep</w>": 1117, "epa</w>": 15866, "epage</w>": 26931, "epaper</w>": 33584, "epcot</w>": 32524, "eper</w>": 43071, "eph": 45752, "eph</w>": 41240, "ephe": 25129, "epi": 7219, "epi</w>": 34641, "epic": 12683, "epic</w>": 4991, "epiconetsy</w>": 49222, "epide": 17382, "epidemi": 44447, "epidemic</w>": 21522, "epile": 23150, "epilepsy</w>": 29547, "epilo": 31291, "epilots</w>": 39766, "epiph": 40561, "epiphany</w>": 43251, "epis": 24616, "episcop": 28037, "episcopal</w>": 31221, "episo": 2708, "episode</w>": 2965, "episodes</w>": 11837, "epit": 21967, "epitome</w>": 35114, "epl</w>": 25950, "epo": 25810, "epp": 39054, "epp</w>": 39593, "eps</w>": 4090, "epsilon</w>": 40019, "epsom</w>": 40364, "epstein</w>": 34688, "eq": 39331, "eq</w>": 33692, "equ": 2563, "equal": 17373, "equal</w>": 10433, "equality": 48981, "equality</w>": 9578, "equally</w>": 18172, "equals</w>": 30278, "equation</w>": 28591, "equations</w>": 38225, "eque": 19518, "equestrian</w>": 24728, "equi": 8752, "equili": 43262, "equine</w>": 33801, "equinox</w>": 32652, "equip": 6526, "equip</w>": 36979, "equipment</w>": 6893, "equipo</w>": 45688, "equipped</w>": 18331, "equitable</w>": 44717, "equities</w>": 44015, "equity</w>": 11293, "equivalent</w>": 19489, "er": 517, "er</w>": 528, "era": 30548, "era</w>": 2072, "erable</w>": 18801, "erad": 24194, "eradic": 36346, "eradicate</w>": 46164, "eral</w>": 6222, "eran</w>": 13069, "eras": 19325, "eras</w>": 39090, "erase</w>": 33893, "erased</w>": 46762, "erasmus</w>": 38935, "erc": 5360, "erc</w>": 32382, "erd</w>": 25645, "erdo": 21112, "erdogan</w>": 24453, "ere": 17907, "ere</w>": 642, "erec": 21526, "erected</w>": 39365, "ered</w>": 9097, "eres</w>": 15751, "ergon": 38120, "ergy</w>": 19550, "eri": 2769, "eri</w>": 9509, "eria</w>": 11634, "erial</w>": 5409, "eric": 1206, "eric</w>": 5396, "erica</w>": 13208, "erich</w>": 26070, "erick": 27434, "erick</w>": 36959, "erickson</w>": 45286, "ericsson</w>": 39645, "eridge</w>": 45408, "erie</w>": 7005, "eries</w>": 9099, "erik": 22805, "erik</w>": 16532, "erika</w>": 25531, "erin": 17532, "erin</w>": 11333, "erina</w>": 25176, "ering</w>": 1785, "erit": 23335, "eritrea</w>": 30738, "erjee</w>": 41665, "erly</w>": 14380, "erm</w>": 31649, "erman</w>": 17990, "ern": 6992, "ern</w>": 12140, "ernal</w>": 20868, "ernan</w>": 34617, "ernation</w>": 48796, "erne": 33930, "ernest</w>": 23006, "ernie</w>": 23636, "ernity</w>": 14653, "erno</w>": 40812, "ernst</w>": 30099, "ero": 3211, "ero</w>": 3732, "erock</w>": 38206, "eron</w>": 32837, "eroom</w>": 46690, "eros</w>": 30597, "erose</w>": 48657, "erosion</w>": 30174, "erotic</w>": 30708, "erotica</w>": 39126, "erous</w>": 6384, "eroy</w>": 36461, "erp</w>": 28268, "err": 22479, "err</w>": 25346, "erra</w>": 48446, "errands</w>": 45485, 
"error</w>": 12097, "errors</w>": 21195, "erry": 45236, "erry</w>": 24124, "ers": 4840, "ers</w>": 612, "ersfc</w>": 37925, "ership</w>": 2884, "erson": 25780, "erson</w>": 6811, "ert": 40325, "ert</w>": 3112, "erta</w>": 32007, "erton</w>": 26245, "erts</w>": 12921, "eru": 36068, "erun</w>": 41642, "erup": 17093, "erupted</w>": 48862, "eruption</w>": 33705, "erville</w>": 37557, "erwin</w>": 43724, "ery": 12467, "ery</w>": 1692, "erz</w>": 38711, "es": 957, "es</w>": 542, "esa": 46834, "esa</w>": 12489, "esanders</w>": 23099, "esc": 3330, "esc</w>": 28420, "escal": 15902, "escap": 11499, "escape": 32484, "escape</w>": 7568, "escaped</w>": 18707, "escapes</w>": 29916, "escaping</w>": 21767, "escar": 39229, "escence</w>": 37972, "esch": 46760, "esch</w>": 41945, "esco": 32482, "escobar</w>": 48807, "escor": 24360, "escort</w>": 24976, "escorted</w>": 47667, "escorts</w>": 48574, "escu": 36517, "esday</w>": 19553, "ese": 18766, "ese</w>": 2260, "esg</w>": 41674, "esh": 17119, "esh</w>": 13407, "esha</w>": 28799, "eshop": 38451, "eshop</w>": 45570, "eshopsuk</w>": 39349, "esi</w>": 30064, "esis</w>": 12414, "esk": 19359, "esl</w>": 26201, "eso": 29890, "eso</w>": 28921, "esof": 17047, "eson": 46845, "esp": 3849, "esp</w>": 13870, "espa": 37301, "espan": 41731, "españa</w>": 41118, "especially</w>": 4878, "esper": 29216, "espino": 46633, "espionage</w>": 43498, "espn": 22917, "espn</w>": 7540, "espnu</w>": 47747, "espo": 34381, "esports</w>": 16035, "espresso</w>": 17098, "esq</w>": 47352, "esqu": 34616, "esque</w>": 25877, "ess": 3118, "ess</w>": 9764, "essa</w>": 39125, "essay</w>": 12751, "essays</w>": 27328, "esse</w>": 22305, "essen</w>": 30489, "essence</w>": 17830, "essenti": 11163, "essential": 47264, "essential</w>": 6895, "essentially</w>": 30042, "essentials</w>": 16191, "essex": 30563, "essex</w>": 11623, "est": 2291, "est</w>": 1509, "esta": 41449, "esta</w>": 10135, "estab": 7010, "establi": 8412, "establish</w>": 19709, "established</w>": 13143, "establishing</w>": 29420, "establishment</w>": 20213, "estas</w>": 39072, "estate": 47130, "estate</w>": 6159, "estates</w>": 26054, "este": 12968, "este</w>": 20579, "esteban</w>": 48381, "esteem</w>": 31541, "esteemed</w>": 36293, "ester</w>": 45808, "esthe": 18468, "esther</w>": 24393, "estim": 8904, "estimate</w>": 21883, "estimated</w>": 16665, "estimates</w>": 21957, "esto": 31589, "esto</w>": 23958, "estonia</w>": 26260, "estonian</w>": 48895, "estrada</w>": 48116, "estre": 31271, "estu": 26272, "estuary</w>": 35269, "esur": 35758, "esville</w>": 39187, "esy": 46268, "et": 1169, "et</w>": 875, "eta</w>": 8761, "etal</w>": 25221, "etary</w>": 13074, "etc</w>": 5353, "etched</w>": 40411, "etching</w>": 41375, "ete": 38820, "ete</w>": 40245, "eter": 8587, "eter</w>": 17007, "eternal</w>": 13732, "eternally</w>": 48486, "eternity</w>": 23832, "eters</w>": 18392, "etf</w>": 31661, "eth": 4819, "eth</w>": 5927, "ethan": 24245, "ethan</w>": 15958, "ethanol</w>": 38166, "ethe": 21312, "ethel</w>": 45921, "ether": 23349, "ethere": 18705, "ethereal</w>": 40925, "ethereum</w>": 19612, "ethernet</w>": 35026, "ethi": 10327, "ethic</w>": 39104, "ethical": 47041, "ethical</w>": 17679, "ethics</w>": 13355, "ethiop": 10897, "ethiopia</w>": 13920, "ethiopian</w>": 24507, "ethnic": 30522, "ethnic</w>": 16344, "ethnicity</w>": 46787, "ethno": 34225, "ethos</w>": 48768, "eti": 11188, "eti</w>": 30394, "etienne</w>": 46118, "eties</w>": 15137, "etihad</w>": 38489, "etiquette</w>": 37957, "etis": 38216, "etisation</w>": 39733, "etna</w>": 41940, "eto": 
27829, "eto</w>": 33837, "eton</w>": 44339, "etour</w>": 41462, "etr": 23012, "etres</w>": 42838, "ets</w>": 3442, "etsy": 13237, "etsy</w>": 6282, "etsym": 22902, "etsymntt</w>": 25416, "etsyshop</w>": 44643, "ett": 32729, "ett</w>": 24998, "etta</w>": 30466, "ette": 19981, "ette</w>": 5212, "ettes</w>": 35326, "etto</w>": 44219, "etty</w>": 40759, "etu": 36593, "etv": 49155, "etv</w>": 20325, "etwork</w>": 20585, "ety": 25920, "ety</w>": 2746, "etz": 36181, "etz</w>": 25301, "eu": 1506, "eu</w>": 3238, "eucalyp": 41068, "eucalyptus</w>": 42351, "euchar": 38362, "eugen": 30678, "eugene</w>": 17760, "eul": 46749, "eun": 16431, "eun</w>": 26219, "eunhyuk</w>": 47526, "eup": 44435, "euph": 21386, "euphoria</w>": 41051, "eur": 18343, "eur</w>": 12018, "eura": 32605, "eure": 25311, "euref</w>": 48017, "eureka</w>": 31686, "euro": 2039, "euro</w>": 8463, "euroleague</w>": 46821, "europa</w>": 18290, "europale": 42473, "europaleague</w>": 44029, "europarl</w>": 44922, "europe": 4198, "europe</w>": 3848, "european": 26712, "european</w>": 4759, "europeans</w>": 37082, "euros</w>": 22274, "eurovision</w>": 17593, "eurozone</w>": 42555, "eurusd</w>": 40895, "eus</w>": 44214, "euston</w>": 46905, "euthan": 43280, "euve</w>": 40652, "eux</w>": 25019, "ev": 776, "ev</w>": 10133, "eva</w>": 6845, "evacu": 13187, "evacuated</w>": 26806, "evacuation</w>": 27353, "eval": 25139, "eval</w>": 9703, "evalu": 10314, "evaluate</w>": 27174, "evaluating</w>": 34541, "evaluation</w>": 17640, "evan": 12821, "evan</w>": 12847, "evangel": 20518, "evangeli": 21372, "evangelical</w>": 36151, "evangelist</w>": 42275, "evankirstel</w>": 46581, "evans</w>": 8836, "evansville</w>": 44782, "evapor": 33352, "evasion</w>": 48795, "eve": 5732, "eve</w>": 1866, "eved</w>": 19820, "evel": 39315, "evelyn</w>": 26687, "evement</w>": 8210, "even": 6359, "even</w>": 1427, "evening": 34487, "evening</w>": 2285, "evenings</w>": 19994, "evenly</w>": 45974, "event": 10612, "event</w>": 1655, "eventful</w>": 45628, "evento</w>": 38155, "eventprofs</w>": 24980, "events</w>": 3667, "eventu": 14055, "eventual</w>": 45321, "eventually</w>": 14397, "ever": 888, "ever</w>": 1247, "everest</w>": 21722, "everett</w>": 25456, "everglades</w>": 46294, "evergreen</w>": 23852, "everlasting</w>": 32849, "evers</w>": 31914, "everton</w>": 13315, "every": 1091, "every</w>": 1505, "everybody</w>": 5901, "everyday": 25049, "everyday</w>": 5160, "everyone</w>": 1584, "everything": 36376, "everything</w>": 2410, "everytime</w>": 16911, "everywhere</w>": 6364, "eves</w>": 7323, "evi": 5348, "evi</w>": 36989, "evic": 21336, "eviction</w>": 37111, "eviden": 46220, "evidence</w>": 6439, "evident</w>": 34529, "evie</w>": 47195, "evil": 23218, "evil</w>": 6006, "eville</w>": 16143, "eving</w>": 24729, "evo": 17962, "evo</w>": 13169, "evoc": 43133, "evol": 5350, "evolu": 7725, "evolution</w>": 8902, "evolutionary</w>": 30629, "evolve</w>": 23406, "evolved</w>": 22613, "evolving</w>": 23675, "evp</w>": 46154, "evs</w>": 33576, "ew": 11942, "ew</w>": 15428, "ewan</w>": 40247, "ewe</w>": 48438, "ewing</w>": 38873, "ews</w>": 9878, "ex": 659, "ex</w>": 4118, "exac": 5460, "exact</w>": 12651, "exactly</w>": 5840, "exagger": 29766, "exal": 49324, "exam": 4428, "exam</w>": 8785, "examination</w>": 20970, "examine</w>": 25728, "examined</w>": 44004, "examiner</w>": 29149, "examines</w>": 28160, "examining</w>": 30616, "example</w>": 6228, "examples</w>": 14790, "exams</w>": 14028, "exas</w>": 47536, "exc": 1302, "excav": 20733, "excavation</w>": 45909, "exce": 10999, 
"exceed</w>": 32521, "exceeded</w>": 36221, "exceeding</w>": 47213, "exceeds</w>": 49353, "excel": 28351, "excel</w>": 18754, "excell": 3298, "excellence</w>": 8171, "excellency</w>": 36503, "excellent</w>": 4239, "excelsi": 47315, "excep": 8882, "except</w>": 8541, "exception</w>": 25018, "exceptional</w>": 13425, "exceptionally</w>": 29306, "excer": 17737, "excerpt</w>": 20586, "excess</w>": 22491, "excessive</w>": 21332, "exchange</w>": 6616, "exchanged</w>": 48919, "exchanges</w>": 29730, "exchanging</w>": 47760, "excit": 10510, "excite</w>": 47711, "excited</w>": 1889, "excitement</w>": 11407, "exciting</w>": 4300, "exclu": 3114, "exclude</w>": 49235, "excluded</w>": 46216, "excluding</w>": 44326, "exclusion</w>": 40219, "exclusive</w>": 3747, "exclusively</w>": 13565, "exclusives</w>": 47149, "excu": 7324, "excur": 27533, "excursion</w>": 34869, "excuse</w>": 9266, "excuses</w>": 19388, "exe": 3554, "exe</w>": 48027, "exec</w>": 15052, "execs</w>": 35728, "execu": 4360, "execute</w>": 36405, "executed</w>": 20432, "execution</w>": 18085, "executive</w>": 5944, "executives</w>": 24357, "exem": 19753, "exemp": 28602, "exempl": 36371, "exemplary</w>": 39123, "exempli": 41934, "exempt</w>": 44278, "exemption</w>": 47481, "exer": 40295, "exerc": 5932, "exercise</w>": 7016, "exercises</w>": 19669, "exercising</w>": 39036, "exeter": 32137, "exeter</w>": 18837, "exfoli": 38823, "exhau": 11154, "exhaust</w>": 21812, "exhausted</w>": 21741, "exhausting</w>": 40035, "exhaustion</w>": 49221, "exhi": 3022, "exhib": 3783, "exhibit": 24992, "exhibit</w>": 8209, "exhibiting</w>": 23889, "exhibition</w>": 4219, "exhibitions</w>": 28311, "exhibitor</w>": 44192, "exhibitors</w>": 38542, "exhibits</w>": 30093, "exhilar": 40262, "exhilarating</w>": 49289, "exi": 5297, "exico</w>": 38712, "exile</w>": 28566, "exist": 10899, "exist</w>": 9645, "existed</w>": 23198, "existence</w>": 13832, "existent</w>": 43541, "existential</w>": 38752, "existing</w>": 12886, "exists</w>": 14608, "exit</w>": 9374, "exited</w>": 37581, "exiting</w>": 39577, "exits</w>": 34943, "exmoor</w>": 48260, "exo": 15600, "exo</w>": 5842, "exodus</w>": 30098, "exol</w>": 42856, "exop": 35288, "exoplan": 37980, "exor": 24506, "exorcist</w>": 46309, "exotic</w>": 15639, "exp": 9923, "exp</w>": 19066, "expan": 7512, "expand": 10382, "expand</w>": 13141, "expanded</w>": 18390, "expanding</w>": 15755, "expands</w>": 22223, "expanse</w>": 46886, "expansion</w>": 10138, "expansive</w>": 49261, "expat</w>": 43900, "expe": 2560, "expect": 9802, "expect</w>": 5716, "expectation</w>": 34273, "expectations</w>": 12529, "expected</w>": 5573, "expecting</w>": 12525, "expects</w>": 24536, "expedition</w>": 16761, "expeditions</w>": 49327, "expelled</w>": 48834, "expen": 7216, "expend": 29302, "expenditure</w>": 47044, "expense</w>": 28473, "expenses</w>": 21797, "expensive</w>": 9649, "exper": 1533, "experi": 4723, "experience": 31867, "experience</w>": 2415, "experienced</w>": 10417, "experiences</w>": 8233, "experiencing</w>": 16643, "experiential</w>": 44952, "experim": 6697, "experiment</w>": 13079, "experimental</w>": 16539, "experimenting</w>": 28263, "experiments</w>": 21077, "expert</w>": 6284, "expertise</w>": 16555, "experts</w>": 6960, "expi": 26850, "expir": 35077, "expire</w>": 49315, "expired</w>": 30200, "expires</w>": 34739, "expl": 3261, "expla": 3517, "explain": 48918, "explain</w>": 7304, "explained</w>": 14229, "explaining</w>": 13136, "explains</w>": 6655, "explan": 13294, "explanation</w>": 16577, "explanations</w>": 34383, 
"explic": 21011, "explicit</w>": 33228, "explo": 3586, "explode</w>": 31262, "exploded</w>": 28947, "explodes</w>": 38119, "exploding</w>": 34683, "exploit</w>": 36953, "exploited</w>": 48554, "explor": 11958, "exploration</w>": 14043, "explore": 10405, "explore</w>": 5147, "explorebc</w>": 38754, "explorecanada</w>": 36600, "explored</w>": 25016, "explorer</w>": 15776, "explorers</w>": 28491, "explores</w>": 13996, "exploring</w>": 7584, "explosion</w>": 13785, "explosions</w>": 38646, "explosive</w>": 18888, "explosives</w>": 44705, "expo": 7820, "expo</w>": 6344, "expon": 27905, "export</w>": 14444, "exporting</w>": 47433, "exports</w>": 20088, "expose</w>": 23181, "exposed</w>": 12180, "exposes</w>": 33575, "exposing</w>": 28362, "exposition</w>": 36943, "exposure</w>": 11903, "expre": 6085, "express": 18553, "express</w>": 5642, "expressed</w>": 20777, "expresses</w>": 31931, "expressing</w>": 30207, "expression</w>": 11357, "expressions</w>": 20314, "expressive</w>": 42060, "expressway</w>": 31658, "exquis": 16575, "exquisite</w>": 17958, "ext": 5711, "ext</w>": 20072, "exten": 5555, "extend</w>": 14492, "extended</w>": 9614, "extending</w>": 25652, "extends</w>": 20688, "extension</w>": 10275, "extensions</w>": 24525, "extensive</w>": 16870, "extensively</w>": 47365, "extent</w>": 24913, "exter": 9797, "exterior</w>": 19352, "extermin": 41671, "external</w>": 15028, "extin": 13553, "extinct</w>": 24488, "extinction</w>": 21186, "extingui": 38567, "extor": 35620, "extr": 29082, "extra": 6416, "extra</w>": 4231, "extrac": 18550, "extract</w>": 18962, "extraction</w>": 28789, "extracts</w>": 45576, "extraordin": 23628, "extraordinaire</w>": 30909, "extraordinary</w>": 10982, "extras</w>": 29817, "extravag": 22299, "extravaganza</w>": 29461, "extre": 3978, "extreme": 38357, "extreme</w>": 8331, "extremely</w>": 6519, "extremism</w>": 31493, "extremist</w>": 36383, "extremists</w>": 41425, "extru": 43010, "ey": 1541, "ey</w>": 1477, "eyang</w>": 28915, "eye": 5034, "eye</w>": 3272, "eyebrow</w>": 34250, "eyebrows</w>": 19923, "eyed</w>": 15512, "eyeing</w>": 34916, "eyel": 17075, "eyelashes</w>": 42074, "eyeliner</w>": 33354, "eyeon</w>": 25126, "eyes</w>": 3095, "eyeshadow</w>": 35213, "eyewear</w>": 30165, "eyewitness</w>": 36258, "eyou": 31996, "eyour": 40229, "eyre</w>": 44115, "ez": 10082, "ez</w>": 8387, "eze": 25993, "eze</w>": 27229, "ezekiel</w>": 41428, "ezra</w>": 27552, "f": 69, "f</w>": 325, "fa": 778, "fa</w>": 2800, "faa</w>": 27577, "fab": 2833, "fab</w>": 5492, "faber": 43461, "faber</w>": 42488, "fabi": 29425, "fabian</w>": 34539, "fabio</w>": 31666, "fabric": 16217, "fabric</w>": 10033, "fabricated</w>": 40851, "fabrication</w>": 33476, "fabrics</w>": 23159, "fabulous</w>": 5189, "fac": 1053, "fac</w>": 35438, "facade</w>": 29217, "face": 2545, "face</w>": 1710, "facebook": 36156, "facebook</w>": 2943, "faced</w>": 10941, "faceli": 32023, "facelift</w>": 36380, "faceoff</w>": 42710, "facep": 45285, "faces</w>": 4905, "faceted</w>": 43435, "facetime</w>": 24076, "facial</w>": 11909, "facil": 39973, "facilit": 13567, "facilitate</w>": 26733, "facilitated</w>": 43853, "facilitating</w>": 34796, "facilities</w>": 10388, "facility</w>": 8165, "facing</w>": 7619, "fact": 17189, "fact</w>": 3598, "factfriday</w>": 27953, "faction</w>": 14629, "factor": 21082, "factor</w>": 8124, "factories</w>": 36492, "factors</w>": 12733, "factory": 42483, "factory</w>": 6072, "facts</w>": 5085, "factual</w>": 45471, "faculty</w>": 9504, "facup</w>": 25283, "fad": 12632, "fad</w>": 47669, 
"fade</w>": 20486, "faded</w>": 26051, "fades</w>": 40441, "fading</w>": 32882, "fadnavis</w>": 38945, "faf": 31052, "faf</w>": 43903, "fag": 25617, "fag</w>": 39305, "fah": 25495, "fah</w>": 35429, "fahren": 45527, "fai": 20519, "fai</w>": 26384, "fail": 7105, "fail</w>": 6801, "failed</w>": 8314, "failing</w>": 15757, "fails</w>": 13388, "failure</w>": 8732, "failures</w>": 25442, "faint</w>": 30807, "fair": 3031, "fair</w>": 2849, "fairbanks</w>": 43962, "faire": 34745, "faire</w>": 20798, "fairfax</w>": 29368, "fairfield</w>": 29664, "fairgrounds</w>": 38325, "fairi": 28884, "fairies</w>": 33590, "fairly</w>": 14961, "fairmont</w>": 41547, "fairness</w>": 29388, "fairs</w>": 8655, "fairtrade</w>": 33361, "fairview</w>": 43479, "fairway</w>": 44022, "fairy": 17021, "fairy</w>": 10444, "fairytale</w>": 28944, "fais": 23542, "faisal</w>": 35459, "fait</w>": 20567, "faith": 10653, "faith</w>": 5080, "faithful</w>": 15511, "faiz": 41775, "fake": 18794, "fake</w>": 5777, "faken": 22853, "fakenews</w>": 26943, "fakespeare</w>": 49095, "fal": 2778, "fal</w>": 40494, "fala": 47120, "falcon": 22498, "falcon</w>": 13571, "falcons</w>": 13834, "falk": 34648, "falkirk</w>": 44080, "fall": 6489, "fall</w>": 2359, "fallen</w>": 8688, "falling": 48709, "falling</w>": 7293, "fallon": 39596, "fallon</w>": 21281, "fallontonight</w>": 44627, "fallout": 49365, "fallout</w>": 16009, "falls</w>": 4778, "falmouth</w>": 38261, "false": 38948, "false</w>": 9078, "falsely</w>": 42321, "fam": 1058, "fam</w>": 5128, "fame</w>": 6573, "famed</w>": 23302, "famer</w>": 24554, "famil": 3395, "famili": 8488, "familia</w>": 25622, "familiar</w>": 10020, "families</w>": 4612, "family": 8137, "family</w>": 1315, "familyfun</w>": 46308, "familytime</w>": 47236, "familytravel</w>": 38222, "famine</w>": 35847, "famous": 44811, "famous</w>": 4096, "famously</w>": 44505, "fan": 1675, "fan</w>": 2261, "fanart": 41059, "fanart</w>": 7855, "fanartfriday</w>": 45346, "fanatic</w>": 36643, "fanatics</w>": 39610, "fanbase</w>": 36921, "fanboy</w>": 43369, "fanc": 29017, "fancafe</w>": 45080, "fanci": 35908, "fanclub</w>": 31530, "fancy": 47622, "fancy</w>": 6733, "fand": 19684, "fandom": 47634, "fandom</w>": 11534, "fanfest</w>": 42916, "fanfic</w>": 47243, "fang": 14269, "fang</w>": 27428, "fangirl</w>": 28813, "fangirling</w>": 39463, "fanning</w>": 37282, "fanny</w>": 30401, "fans": 32454, "fans</w>": 1840, "fansign</w>": 25288, "fant": 4467, "fanta": 2703, "fantaken</w>": 39412, "fantasia</w>": 49306, "fantastic": 31289, "fantastic</w>": 2935, "fantasy": 15124, "fantasy</w>": 5267, "fantasyfootball</w>": 35713, "fao</w>": 31155, "faq</w>": 28533, "far": 1578, "far</w>": 2384, "fara</w>": 48562, "farage</w>": 28340, "farah</w>": 31547, "fare": 8620, "fare</w>": 6461, "fares</w>": 27525, "farewell</w>": 10734, "fargo</w>": 18870, "fari": 26197, "farley</w>": 43761, "farm": 9066, "farm</w>": 3985, "farmer": 19735, "farmer</w>": 10474, "farmers": 29752, "farmers</w>": 6402, "farmersmarket</w>": 41808, "farmhouse</w>": 26293, "farming</w>": 10399, "farmington</w>": 49305, "farmland</w>": 45258, "farms</w>": 11277, "farn": 27527, "faroo": 39147, "farra": 33657, "farrakhan</w>": 46293, "farrell</w>": 24234, "fart</w>": 34664, "farther</w>": 42233, "fas": 4830, "fas</w>": 42995, "fasci": 17191, "fascin": 7327, "fascinated</w>": 32964, "fascinating</w>": 8640, "fascism</w>": 28213, "fascist</w>": 23870, "fascists</w>": 43598, "fash": 42682, "fashi": 2099, "fashion": 6976, "fashion</w>": 2444, "fashionable</w>": 24597, 
"fashionblogger</w>": 31726, "fashioned</w>": 21563, "fashioni": 26062, "fashionista</w>": 30415, "fashions</w>": 37601, "fashionshow</w>": 45653, "fashionweek</w>": 28684, "fass": 42398, "fast": 8509, "fast</w>": 1953, "fasten": 44990, "faster</w>": 8835, "fastest</w>": 9808, "fasting</w>": 24656, "fat": 4751, "fat</w>": 5484, "fatal</w>": 12124, "fatalities</w>": 44168, "fatally</w>": 34069, "fate": 26315, "fate</w>": 11734, "father": 11607, "father</w>": 3224, "fathers</w>": 12780, "fathersday</w>": 16731, "fati": 13430, "fatigue</w>": 23747, "fatima</w>": 28202, "fats</w>": 30151, "fatt": 44131, "fatty</w>": 22953, "fau": 5571, "fau</w>": 31381, "faucet</w>": 44273, "faul": 16230, "faulkner</w>": 37840, "fault</w>": 13862, "faults</w>": 42752, "faulty</w>": 47103, "fauna</w>": 30808, "faust</w>": 44772, "faux</w>": 19429, "fav": 1355, "fav</w>": 5426, "fave</w>": 7272, "faves</w>": 18003, "favor": 1766, "favor</w>": 12160, "favorable</w>": 35392, "favored</w>": 46640, "favorite": 35262, "favorite</w>": 1916, "favorited</w>": 36926, "favorites</w>": 10564, "favors</w>": 36085, "favour": 3111, "favour</w>": 20469, "favourite</w>": 3342, "favourites</w>": 16585, "favs</w>": 18879, "faw": 21800, "fawad": 46425, "fawn</w>": 48624, "fax": 32535, "fax</w>": 9337, "fay": 8939, "fay</w>": 40074, "faye</w>": 30257, "fayette": 32043, "fayette</w>": 19782, "fayetteville</w>": 37771, "fayre</w>": 34982, "faz": 26238, "faze</w>": 44880, "fb": 22637, "fb</w>": 3307, "fball</w>": 29663, "fbf</w>": 20004, "fbi</w>": 10293, "fbloggers</w>": 41389, "fbs</w>": 48454, "fc": 4278, "fc</w>": 1399, "fca</w>": 24540, "fcb": 26639, "fcb</w>": 25045, "fcbarcelona</w>": 32174, "fcbayern</w>": 35033, "fcblive</w>": 44608, "fcc</w>": 21240, "fck": 40080, "fck</w>": 49263, "fcofficial</w>": 27805, "fcs</w>": 32095, "fcu</w>": 47898, "fd": 16972, "fd</w>": 11525, "fda</w>": 17823, "fdi</w>": 45579, "fdn</w>": 18563, "fdny</w>": 41084, "fdr</w>": 42298, "fe": 623, "fe</w>": 873, "fear": 8744, "fear</w>": 5402, "feared</w>": 31154, "fearless</w>": 17470, "fears</w>": 13867, "fearthe": 33449, "feasi": 34977, "feast": 37963, "feast</w>": 9564, "feat": 1703, "feat</w>": 5611, "feather": 24905, "feather</w>": 17871, "feathers</w>": 21138, "featherweight</w>": 44939, "feature": 30413, "feature</w>": 4527, "featured</w>": 4743, "features</w>": 4643, "featuring</w>": 3706, "feb</w>": 4317, "febru": 4202, "february</w>": 4248, "fect</w>": 31293, "fed": 22518, "fed</w>": 7035, "feder": 4737, "federal</w>": 6369, "federation</w>": 15530, "federer</w>": 18246, "federico</w>": 40539, "fedex</w>": 32603, "fedora</w>": 45111, "feds</w>": 30593, "fee": 28242, "fee</w>": 9224, "feed": 6662, "feed</w>": 5839, "feedback</w>": 8683, "feeder</w>": 24482, "feeders</w>": 44523, "feeding</w>": 9879, "feeds</w>": 21788, "feel": 2408, "feel</w>": 2051, "feelin</w>": 19903, "feeling": 33087, "feeling</w>": 3045, "feelings</w>": 9452, "feels</w>": 4808, "feelthe": 22322, "feelthebern</w>": 27743, "fees</w>": 11765, "feet</w>": 4804, "fei": 23441, "fei</w>": 34217, "fein": 46707, "feinstein</w>": 41313, "fel": 2081, "fel</w>": 20304, "feld": 45913, "feld</w>": 14219, "feldman</w>": 41942, "feli": 7498, "felic": 25845, "felici": 23379, "felicia</w>": 41139, "felicidades</w>": 41648, "felicity</w>": 35123, "feline</w>": 29471, "felipe</w>": 27681, "felix": 33455, "felix</w>": 16514, "feliz": 26104, "feliz</w>": 20221, "fell": 33540, "fell</w>": 6266, "fella</w>": 17586, "fellas</w>": 18787, "feller</w>": 29226, "fellow": 12099, "fellow</w>": 5242, 
"fellows</w>": 15766, "fellowship</w>": 13857, "felony</w>": 31068, "felt</w>": 5413, "fem": 24574, "fem</w>": 36615, "fema</w>": 41721, "female": 22062, "female</w>": 3970, "females</w>": 21028, "femi</w>": 38607, "femin": 11423, "femini": 11894, "feminine</w>": 24911, "feminism</w>": 18784, "feminist</w>": 14921, "feminists</w>": 38809, "femme</w>": 31331, "fen": 5509, "fen</w>": 25024, "fence</w>": 12679, "fences</w>": 34312, "fencing</w>": 23489, "fender</w>": 17117, "fener": 41208, "fenerbah": 46652, "feng</w>": 33291, "fennel</w>": 28689, "fent": 26395, "fenton</w>": 47265, "fenway</w>": 29206, "fer": 1765, "fer</w>": 2897, "fera</w>": 37705, "feral</w>": 29972, "ferdin": 25541, "ferdinand</w>": 27591, "fere</w>": 43144, "feren": 35652, "ference</w>": 19984, "ferg</w>": 44938, "fergie</w>": 39119, "fergu": 10988, "fergus</w>": 42041, "ferguson</w>": 11904, "fermentation</w>": 45817, "fermented</w>": 36886, "fern": 10747, "fern</w>": 21685, "fernandes</w>": 44391, "fernandez</w>": 23436, "fernando</w>": 17140, "ferns</w>": 38277, "feroci": 45652, "ferr": 7256, "ferra": 47911, "ferrari</w>": 9606, "ferre": 29626, "ferred</w>": 10432, "ferreira</w>": 48686, "ferrell</w>": 41112, "ferrer</w>": 38904, "ferri": 42008, "ferries</w>": 28489, "ferris</w>": 27532, "ferry": 38936, "ferry</w>": 10278, "fers</w>": 12378, "fert": 14925, "fert</w>": 43662, "fertil": 41987, "fertile</w>": 44837, "fertili": 23912, "fertility</w>": 23528, "fertilizer</w>": 36786, "fery</w>": 47448, "fes</w>": 32300, "fest": 17383, "fest</w>": 2590, "festa</w>": 42124, "festi": 1943, "festiv": 19222, "festival": 20946, "festival</w>": 2240, "festivals</w>": 17834, "festive</w>": 9533, "festivities</w>": 21020, "fet": 21409, "feta</w>": 31705, "fetal</w>": 42031, "fetch</w>": 30271, "fete</w>": 34629, "fett</w>": 37979, "fetus</w>": 26768, "feu": 24912, "feu</w>": 32990, "feud</w>": 27365, "fever": 40896, "fever</w>": 9989, "fevre</w>": 43861, "few</w>": 1939, "fewer</w>": 19128, "fex": 41584, "fex</w>": 26392, "fey": 39069, "fey</w>": 23298, "fez</w>": 43081, "ff": 1021, "ff</w>": 1304, "ffa</w>": 15355, "ffame</w>": 42873, "ffc</w>": 19832, "ffe": 1138, "ffe</w>": 8631, "ffect</w>": 29151, "ffed</w>": 8448, "ffee</w>": 26377, "ffel</w>": 22656, "ffen</w>": 46537, "ffer": 27369, "ffer</w>": 11636, "ffers</w>": 32163, "fferty</w>": 44771, "ffes</w>": 46441, "ffey</w>": 30138, "fff</w>": 28106, "ffi": 19961, "ffic": 4762, "ffice</w>": 26044, "ffici": 3639, "fficial": 39818, "fficial</w>": 6463, "fficiency</w>": 27800, "fficient</w>": 20424, "ffin": 12779, "ffin</w>": 7367, "ffing</w>": 16592, "ffins</w>": 17898, "ffl</w>": 39490, "ffle</w>": 7749, "ffler</w>": 39819, "ffles</w>": 19344, "ffman</w>": 15823, "ffo</w>": 42264, "ffs</w>": 4424, "ffxiv</w>": 26569, "ffxv</w>": 46786, "ffy": 26404, "ffy</w>": 7795, "fg": 45977, "fg</w>": 6823, "fgm</w>": 32178, "fgo</w>": 46113, "fh": 21649, "fh</w>": 21010, "fhs</w>": 45094, "fi": 701, "fi</w>": 3589, "fia</w>": 8827, "fiable</w>": 34373, "fianc": 27752, "fiance</w>": 44114, "fiancé</w>": 34039, "fiasco</w>": 40944, "fiat</w>": 16740, "fiawec</w>": 39485, "fib</w>": 40594, "fiba</w>": 34993, "fiber": 35074, "fiber</w>": 12612, "fibers</w>": 44587, "fibre</w>": 21401, "fibro": 21294, "fibrosis</w>": 36307, "fic": 1788, "fic</w>": 2059, "fica</w>": 26952, "fically</w>": 14854, "fication</w>": 4523, "fications</w>": 12512, "ficial</w>": 48192, "fics</w>": 42505, "fiction</w>": 6218, "fictional</w>": 25570, "fid": 34197, "fid</w>": 23966, "fidd": 25218, "fiddle</w>": 35968, 
"fide</w>": 45375, "fidel": 21740, "fidel</w>": 36837, "fidelity</w>": 30109, "fidget</w>": 48664, "fie": 28487, "fie</w>": 10348, "fied": 29642, "fied</w>": 2853, "fiel": 1361, "field": 7571, "field</w>": 1570, "fielder</w>": 11046, "fieldhouse</w>": 37969, "fielding</w>": 30465, "fields</w>": 6494, "fieldwork</w>": 33155, "fiends</w>": 37869, "fier": 11167, "fier</w>": 10598, "fierc": 48609, "fierce</w>": 13896, "fiercely</w>": 49039, "fiers</w>": 16113, "fiery</w>": 24557, "fies</w>": 9537, "fiesta</w>": 14580, "fif": 5309, "fifa": 21976, "fifa</w>": 8516, "fifaworldcup</w>": 38819, "fifawwc</w>": 41329, "fife</w>": 24374, "fifteen</w>": 29504, "fifth": 25515, "fifth</w>": 8772, "fifthharmony</w>": 31075, "fifty": 24456, "fifty</w>": 15978, "fig": 4814, "fig</w>": 20719, "figaro</w>": 48044, "figh": 23274, "fight": 5262, "fight</w>": 2757, "fighter": 35884, "fighter</w>": 6438, "fighters</w>": 7371, "fightfor": 48909, "fightfor</w>": 35740, "fighting": 38625, "fighting</w>": 4652, "fighton</w>": 45578, "fights</w>": 12132, "figs</w>": 38882, "figu": 6390, "figur": 16948, "figurative</w>": 44042, "figure": 48820, "figure</w>": 5274, "figured</w>": 15630, "figures</w>": 8739, "figurine</w>": 33306, "figuring</w>": 31513, "fiji": 48270, "fiji</w>": 18285, "fik</w>": 46589, "fil": 1142, "fil</w>": 14915, "fila</w>": 30992, "filament</w>": 49252, "file": 12545, "file</w>": 4512, "filed</w>": 13864, "files</w>": 7850, "filet</w>": 43155, "fili": 9590, "filing</w>": 16576, "filip": 14368, "filipino</w>": 19153, "fill": 15904, "fill</w>": 6277, "filled</w>": 5589, "filler</w>": 32816, "fillers</w>": 45005, "fillet</w>": 39276, "filling</w>": 9736, "fillion</w>": 38048, "fillmore</w>": 43922, "fills</w>": 21750, "filly</w>": 27690, "film": 5117, "film</w>": 1860, "filmed</w>": 15801, "filmfare</w>": 42224, "filmfest</w>": 24508, "filmfestival</w>": 28066, "filming</w>": 6866, "filmmaker</w>": 17202, "filmmakers</w>": 24896, "filmmaking</w>": 18226, "films</w>": 5370, "fils</w>": 40271, "filter</w>": 7541, "filtered</w>": 29926, "filtering</w>": 47770, "filters</w>": 18385, "filth</w>": 39713, "filthy</w>": 26899, "filtr": 21408, "filtration</w>": 42036, "fim</w>": 47525, "fin": 735, "fin</w>": 10663, "fina</w>": 34497, "final": 11968, "final</w>": 1755, "finale</w>": 7844, "finalfantasy</w>": 44543, "finalfour</w>": 46999, "finalist</w>": 12620, "finalists</w>": 13422, "finalized</w>": 48930, "finally</w>": 1992, "finals</w>": 4536, "finan": 4807, "finance</w>": 6117, "finances</w>": 28767, "financi": 12846, "financial": 19783, "financial</w>": 4930, "financially</w>": 28124, "financing</w>": 18375, "finch</w>": 18523, "find": 18638, "find</w>": 1416, "finder</w>": 15045, "finders</w>": 43884, "findia</w>": 47064, "finding": 37455, "finding</w>": 6002, "findings</w>": 16529, "findlay</w>": 48227, "findom</w>": 36463, "finds</w>": 6680, "findyour": 25936, "findyourpark</w>": 38924, "fine": 12042, "fine</w>": 3797, "fineart": 7484, "fineart</w>": 16005, "fineartamerica</w>": 7724, "fined</w>": 20094, "finely</w>": 46120, "finer</w>": 36681, "fines</w>": 25053, "finesse</w>": 46047, "finest</w>": 7707, "fing": 6485, "fing</w>": 17955, "finger": 13480, "finger</w>": 8895, "fingerprint</w>": 39579, "fingers</w>": 9690, "fini": 2405, "finish": 42178, "finish</w>": 3958, "finished</w>": 3078, "finisher</w>": 38636, "finishers</w>": 48661, "finishes</w>": 13078, "finishing</w>": 7912, "finite</w>": 48312, "finity": 41463, "finity</w>": 21273, "fink</w>": 40158, "finland</w>": 10775, "finley</w>": 41652, 
"finn": 28479, "finn</w>": 16925, "finna</w>": 35180, "finnish</w>": 19616, "fino</w>": 30083, "fins</w>": 32810, "fintech": 48929, "fintech</w>": 8899, "fion": 27476, "fiona</w>": 20099, "fior": 37086, "fiore</w>": 44997, "fioren": 33188, "fiorentina</w>": 43713, "fios</w>": 42521, "fir": 770, "fir</w>": 16233, "fire": 2951, "fire</w>": 1769, "firearm</w>": 40311, "firearms</w>": 23960, "fireball</w>": 40543, "firec": 42806, "fired</w>": 8846, "firefighter</w>": 20498, "firefighters</w>": 12600, "firefly</w>": 33997, "firefox</w>": 35372, "fireman</w>": 46085, "firen": 34752, "firenze</w>": 38445, "fireplace</w>": 23050, "fires</w>": 8749, "fireside</w>": 36185, "firework</w>": 40750, "fireworks</w>": 10641, "firing</w>": 15105, "firm": 16936, "firm</w>": 7705, "firmly</w>": 29156, "firms</w>": 13655, "firmware</w>": 42691, "first": 6853, "first</w>": 874, "firstdayof": 44297, "firsth": 48512, "firsts</w>": 47884, "firth</w>": 26078, "fis": 7846, "fis</w>": 47683, "fiscal</w>": 20825, "fischer</w>": 26532, "fish": 6431, "fish</w>": 2759, "fisher": 11175, "fisher</w>": 9176, "fisheries</w>": 24612, "fisherman</w>": 25055, "fishermen</w>": 28547, "fishers</w>": 42065, "fishery</w>": 49057, "fishes</w>": 35470, "fishing": 31703, "fishing</w>": 4935, "fishy</w>": 35665, "fist": 48340, "fist</w>": 17085, "fit": 2366, "fit</w>": 2478, "fitbit</w>": 33768, "fitch</w>": 44614, "fitfam</w>": 20662, "fitnes": 47285, "fitness": 20044, "fitness</w>": 4838, "fits</w>": 6401, "fitt": 32994, "fitted</w>": 14863, "fitter</w>": 42096, "fitters</w>": 32364, "fitting</w>": 11769, "fittings</w>": 45787, "fitz": 11120, "fitz</w>": 25913, "fitzgerald</w>": 20606, "fitzpatrick</w>": 37141, "fiu</w>": 38374, "five": 19508, "five</w>": 3127, "fives</w>": 44066, "fix": 4596, "fix</w>": 6028, "fixed</w>": 9393, "fixes</w>": 25473, "fixing</w>": 17423, "fixture</w>": 17317, "fixtures</w>": 19904, "fizz</w>": 31242, "fj": 43183, "fj</w>": 46447, "fjor": 31260, "fk</w>": 12410, "fl": 1082, "fl</w>": 2685, "fla": 1577, "fla</w>": 20292, "flag": 11536, "flag</w>": 4859, "flagged</w>": 45012, "flags</w>": 12221, "flagship</w>": 19779, "flagstaff</w>": 40406, "flair</w>": 24938, "flake</w>": 21221, "flakes</w>": 20934, "flam": 10559, "flame": 40351, "flame</w>": 13484, "flamen": 28826, "flamenco</w>": 37362, "flames</w>": 13441, "flamin": 42693, "flaming</w>": 34782, "flamingo</w>": 30323, "flan": 14572, "flanagan</w>": 28641, "flanders</w>": 34837, "flank</w>": 44553, "flann": 39510, "flannel</w>": 37807, "flap</w>": 35253, "flappy</w>": 40241, "flare</w>": 21185, "flares</w>": 46088, "flash": 6089, "flash</w>": 5815, "flashback": 14616, "flashback</w>": 11988, "flashbackfriday</w>": 15014, "flashbacks</w>": 47056, "flashes</w>": 31259, "flashing</w>": 31764, "flashlight</w>": 37256, "flask</w>": 36194, "flat": 8986, "flat</w>": 6313, "flats</w>": 17228, "flatt": 45498, "flattering</w>": 43267, "flaun": 41421, "flav": 7191, "flavo": 28895, "flavor": 31835, "flavor</w>": 11818, "flavored</w>": 29350, "flavorful</w>": 49135, "flavors</w>": 16930, "flavour</w>": 17026, "flavoured</w>": 42397, "flavours</w>": 21083, "flaw": 14268, "flaw</w>": 34978, "flawed</w>": 35136, "flawless</w>": 15531, "flaws</w>": 30492, "flax</w>": 43443, "fle": 2428, "fle</w>": 44964, "flea</w>": 24883, "fleck</w>": 28143, "fled</w>": 26731, "flee": 19427, "flee</w>": 30167, "fleece</w>": 25038, "fleeing</w>": 30543, "fleek</w>": 43513, "fleet": 35922, "fleet</w>": 9147, "fleetwood</w>": 28883, "fleming</w>": 25769, "fler</w>": 48789, "flesh</w>": 
17495, "flet": 16102, "fletcher</w>": 19810, "fleur</w>": 28593, "flew</w>": 13768, "flex": 16426, "flex</w>": 12038, "flexi": 10032, "flexibility</w>": 22547, "flexible</w>": 14502, "flexing</w>": 48483, "fli": 2472, "flick": 13746, "flick</w>": 23414, "flickr</w>": 17755, "flies</w>": 8070, "flight": 24701, "flight</w>": 3795, "flights</w>": 10515, "flin": 24730, "flin</w>": 43816, "flinders</w>": 44647, "fling</w>": 22768, "flint": 28306, "flint</w>": 18324, "flip": 20385, "flip</w>": 11035, "flipk": 30829, "flipkart</w>": 33154, "flipped</w>": 28144, "flipping</w>": 25881, "flips</w>": 35089, "flir": 24330, "flirt</w>": 38352, "flirting</w>": 35243, "flix</w>": 40663, "flo": 1945, "flo</w>": 20711, "float</w>": 16123, "floating</w>": 12619, "floats</w>": 33272, "flock": 36297, "flock</w>": 21822, "flondon</w>": 47366, "floo": 4062, "flood": 23793, "flood</w>": 7148, "flooded</w>": 19706, "flooding</w>": 10204, "floods</w>": 16369, "floor": 23657, "floor</w>": 4125, "flooring</w>": 19227, "floors</w>": 15671, "flop</w>": 22994, "floppy</w>": 38267, "flops</w>": 29146, "flor": 15784, "flor</w>": 41669, "flora</w>": 18906, "floral</w>": 10732, "florals</w>": 48331, "floren": 37706, "florence</w>": 11617, "flores</w>": 21537, "flori": 3482, "florian</w>": 41861, "florida": 34264, "florida</w>": 3966, "florist</w>": 38403, "floss</w>": 36453, "flotus</w>": 35181, "flour</w>": 18592, "flouri": 23239, "flourish</w>": 36038, "flow": 2180, "flow</w>": 5608, "flower": 12772, "flower</w>": 4055, "flowering</w>": 19953, "flowers</w>": 4023, "flowing</w>": 14922, "flown</w>": 25659, "flows</w>": 16715, "floyd": 46369, "floyd</w>": 13656, "flu": 3698, "flu</w>": 13528, "fluctu": 40181, "fluence</w>": 38169, "fluent</w>": 30025, "fluff</w>": 31174, "fluffy": 40346, "fluffy</w>": 17054, "fluid": 43803, "fluid</w>": 16717, "fluids</w>": 41490, "fluor": 45127, "fluore": 26974, "fluorescent</w>": 35036, "fluori": 45611, "flur": 31591, "flush</w>": 25777, "flushing</w>": 43754, "flute</w>": 23746, "flux</w>": 25249, "flwx</w>": 30907, "fly": 5666, "fly</w>": 3228, "flye": 30873, "flyeagles": 39927, "flyeaglesfly</w>": 39931, "flyer</w>": 11875, "flyers</w>": 14181, "flyfishing</w>": 31800, "flying": 20782, "flying</w>": 4610, "flyn": 40676, "flynn</w>": 15721, "flyo": 33506, "flyover</w>": 38083, "fm": 13715, "fm</w>": 3689, "fman</w>": 25152, "fml</w>": 26730, "fmr</w>": 32875, "fn": 22773, "fn</w>": 21763, "fnc</w>": 46506, "fo": 898, "fo</w>": 6157, "foal</w>": 40386, "foam": 30039, "foam</w>": 14587, "foamed</w>": 26711, "fob</w>": 40315, "focal</w>": 30934, "focu": 5827, "focus</w>": 4353, "focused</w>": 9319, "focuses</w>": 20093, "focusing</w>": 15551, "fod": 31015, "fod</w>": 43299, "fodils</w>": 44411, "foe</w>": 22952, "foes</w>": 46279, "fog</w>": 9417, "foggy</w>": 19770, "foil</w>": 17302, "fol": 1106, "fol</w>": 48616, "fold": 35201, "fold</w>": 11021, "foldable</w>": 48307, "folded</w>": 25233, "folder</w>": 25717, "folding</w>": 15464, "folds</w>": 24266, "foley</w>": 22850, "foli": 7713, "folia</w>": 48964, "foliage</w>": 26350, "folio</w>": 10772, "folk": 10665, "folk</w>": 6032, "folke": 47190, "folkl": 27273, "folklore": 22133, "folklore</w>": 28620, "folklorethursday</w>": 23270, "folks</w>": 5422, "follo": 41417, "follow": 1964, "follow</w>": 1979, "followart</w>": 40957, "followback</w>": 33863, "followed</w>": 6499, "follower</w>": 17039, "followers</w>": 4856, "following</w>": 3473, "followme</w>": 29668, "followparty</w>": 44757, "follows</w>": 11287, "followthe": 30747, 
"folly</w>": 41408, "folsom</w>": 42108, "fom": 34540, "fon": 5017, "fon</w>": 38318, "fond</w>": 19964, "fonda</w>": 44609, "fondue</w>": 48321, "fone</w>": 40672, "font": 37610, "font</w>": 16248, "fontaine</w>": 37864, "fontana</w>": 43643, "fontein</w>": 45062, "fonts</w>": 32801, "foo": 1183, "foo</w>": 23435, "food": 4586, "food</w>": 1559, "foodand": 38317, "foodbank</w>": 31926, "foodie": 30762, "foodie</w>": 9847, "foodies</w>": 22416, "foodnetwork</w>": 46793, "foods</w>": 7057, "foodsecurity</w>": 49329, "foodtruck</w>": 47682, "fool": 23959, "fool</w>": 12212, "fooled</w>": 28761, "fooling</w>": 47964, "foolish</w>": 33824, "fools</w>": 15946, "foot": 6702, "foot</w>": 4738, "footage</w>": 11130, "footb": 33466, "football": 9376, "football</w>": 1882, "footballer</w>": 20646, "footballers</w>": 30269, "footed</w>": 38040, "footh": 25951, "foothills</w>": 37020, "footpath</w>": 48858, "footprint</w>": 23206, "footprints</w>": 39640, "footsteps</w>": 27289, "footwear</w>": 22772, "footy": 39866, "footy</w>": 18922, "for": 645, "for</w>": 556, "forage</w>": 46871, "foraging</w>": 39056, "forall</w>": 17824, "forbe": 49098, "forbes</w>": 13925, "forbi": 24754, "forbidden</w>": 25164, "force": 12068, "force</w>": 2869, "forced</w>": 8201, "forces</w>": 5381, "forchange</w>": 35848, "forcing</w>": 21573, "ford": 3751, "ford</w>": 1623, "fordfc</w>": 28581, "fordham</w>": 48792, "fords</w>": 29351, "fordshire</w>": 14645, "fore": 1484, "fore</w>": 1332, "forec": 34155, "forecast</w>": 7361, "forecasting</w>": 38133, "forecasts</w>": 27696, "foreclo": 44916, "forefront</w>": 37679, "foreground</w>": 35186, "forehead</w>": 25394, "foreig": 26497, "foreign": 42255, "foreign</w>": 6046, "foreigners</w>": 38549, "foreman</w>": 36174, "foremost</w>": 42128, "foren": 16526, "forensic</w>": 23158, "forensics</w>": 38763, "forest": 18760, "forest</w>": 4167, "forestation</w>": 33939, "forestry</w>": 26281, "forests</w>": 14095, "forever": 14748, "forever</w>": 3225, "forevery": 40605, "forex": 40200, "forex</w>": 17395, "forfe": 44871, "forge</w>": 19232, "forged</w>": 28105, "forget": 46153, "forget</w>": 2678, "forgets</w>": 35613, "forgetting</w>": 25452, "forgi": 22080, "forgive</w>": 15332, "forgiven</w>": 44894, "forgiveness</w>": 23585, "forgood</w>": 39169, "forgot</w>": 6483, "forgotten</w>": 7994, "fork": 24501, "fork</w>": 13700, "forkids</w>": 48571, "forklift</w>": 43202, "forks</w>": 28769, "forlife</w>": 17624, "form": 1157, "form</w>": 1907, "forma</w>": 38829, "formal</w>": 12978, "formally</w>": 24867, "format": 16252, "format</w>": 11874, "formation</w>": 2510, "formations</w>": 37715, "formative</w>": 48882, "formats</w>": 32085, "forme</w>": 42085, "formed</w>": 6528, "former</w>": 2276, "formerly</w>": 20866, "formid": 38599, "formidable</w>": 39834, "forming</w>": 15443, "formity</w>": 42290, "forms</w>": 5161, "formu": 8689, "formul": 23923, "formula": 24485, "formula</w>": 10776, "formulae</w>": 34586, "formulated</w>": 45066, "forre": 38876, "forrest</w>": 25205, "forrester</w>": 45338, "forsa": 48958, "forsale</w>": 13303, "forster</w>": 42923, "forsy": 29629, "forsyth</w>": 40952, "fort": 12300, "fort</w>": 2921, "forte": 44350, "forte</w>": 27367, "forth": 17068, "forth</w>": 11932, "forthcoming</w>": 19989, "forthe": 12521, "forti": 26984, "fortified</w>": 46486, "fortn": 14428, "fortnight</w>": 39235, "fortnite": 38734, "fortnite</w>": 17890, "fortress</w>": 19988, "fortun": 6950, "fortunate</w>": 19898, "fortunately</w>": 34358, "fortune": 40931, "fortune</w>": 
11451, "fortunes</w>": 41989, "forty</w>": 24399, "forum": 37851, "forum</w>": 4538, "forums</w>": 31518, "forwar": 34364, "forward": 47031, "forward</w>": 2342, "forwards</w>": 38974, "foryou</w>": 35150, "forz": 46056, "forza": 33293, "forza</w>": 28089, "fos": 36925, "fos</w>": 22081, "foss": 14240, "foss</w>": 37911, "fossil": 20419, "fossil</w>": 15202, "fossilfriday</w>": 26079, "fossils</w>": 30652, "foster": 26778, "foster</w>": 8139, "fostering</w>": 35996, "fosters</w>": 37644, "foto": 15908, "foto</w>": 12823, "fotogra": 23687, "fotografia</w>": 40256, "fotos</w>": 26124, "fou": 14516, "fought</w>": 10844, "foul</w>": 19784, "foun": 3154, "found": 3454, "found</w>": 1546, "foundation</w>": 4058, "foundations</w>": 25219, "founded</w>": 12240, "founder</w>": 5145, "founders</w>": 14602, "founding</w>": 15317, "foundry</w>": 31426, "fountain": 44863, "fountain</w>": 13405, "fountains</w>": 37411, "four": 5113, "four</w>": 2721, "foursquare</w>": 34484, "fourteen</w>": 46255, "fourth</w>": 7516, "fourthofjuly</w>": 47805, "fow": 17084, "fowl</w>": 31685, "fowler</w>": 20980, "fox": 5007, "fox</w>": 3240, "foxandfriends</w>": 45841, "foxes</w>": 24145, "foxnews</w>": 18830, "foxsports</w>": 39267, "foxtv</w>": 49396, "foxx</w>": 32993, "foxy</w>": 27945, "foy</w>": 30284, "foyer</w>": 38011, "foyle</w>": 47902, "fp": 28058, "fp</w>": 8941, "fpl</w>": 27970, "fpp</w>": 36464, "fps</w>": 25300, "fpv</w>": 43175, "fr": 936, "fr</w>": 5512, "fra": 3368, "fra</w>": 15644, "frac": 15607, "fracking</w>": 21894, "fractal</w>": 46471, "fraction</w>": 26788, "fractu": 25847, "fracture</w>": 28995, "fractured</w>": 37421, "fractures</w>": 46213, "frag": 13093, "fragile</w>": 23579, "fragment</w>": 39209, "fragments</w>": 41424, "fragr": 15403, "fragrance</w>": 17874, "fragrances</w>": 44567, "fragrant</w>": 37030, "fram": 27987, "frame": 11029, "frame</w>": 6481, "framed</w>": 13135, "frames</w>": 15479, "framework</w>": 13195, "frameworks</w>": 43136, "framing</w>": 24539, "frampton</w>": 41733, "fran": 2118, "fran</w>": 18878, "franc": 3872, "franc</w>": 42340, "franca": 48952, "france": 12045, "france</w>": 3552, "frances</w>": 20803, "francesca</w>": 32327, "francesco</w>": 25816, "franch": 11756, "franchi": 46438, "franchise</w>": 13664, "franci": 46458, "francis": 22187, "francis</w>": 7660, "francisco</w>": 6887, "franco": 17934, "franco</w>": 17052, "francois</w>": 29317, "frank": 5390, "frank</w>": 5229, "franken": 20487, "franken</w>": 48252, "frankenstein</w>": 26410, "frankfur": 17442, "frankfurt</w>": 18598, "franki": 39227, "frankie": 38373, "frankie</w>": 16215, "franklin": 40935, "franklin</w>": 9999, "frankly</w>": 38015, "franks</w>": 42855, "frans</w>": 47892, "franz</w>": 25449, "franç": 38381, "fraser": 39082, "fraser</w>": 16754, "frat": 15225, "frat</w>": 39292, "fraternity</w>": 24433, "frau": 23063, "fraud": 40647, "fraud</w>": 9961, "fraudul": 42655, "fraudulent</w>": 47408, "fray</w>": 41154, "frazier</w>": 32841, "frc</w>": 41507, "fre": 821, "fre</w>": 43165, "freak": 20352, "freak</w>": 13701, "freaked</w>": 43511, "freakin</w>": 23900, "freaking</w>": 11992, "freaks</w>": 27009, "freaky</w>": 31583, "freck": 33328, "freckles</w>": 48036, "fred": 9486, "fred</w>": 6678, "freddie": 41890, "freddie</w>": 17014, "freddy</w>": 24394, "freder": 10745, "frederic</w>": 41165, "frederick": 37103, "frederick</w>": 18570, "fredo</w>": 48241, "free": 2065, "free</w>": 1139, "freebie</w>": 35865, "freebies</w>": 28630, "freec": 46569, "freed": 12585, "freed</w>": 23392, 
"freedom": 17992, "freedom</w>": 4511, "freedoms</w>": 32500, "freef": 48678, "freel": 14174, "freelance</w>": 21942, "freely</w>": 24436, "freeman</w>": 16450, "freep": 32499, "freepalestine</w>": 39242, "freer": 44676, "frees": 27455, "freestyle</w>": 15594, "freeway</w>": 24927, "freeze</w>": 14187, "freezer</w>": 25390, "freezing</w>": 12499, "frei": 30183, "freight</w>": 17023, "fremantle</w>": 48012, "fremont</w>": 34578, "fren": 2919, "french": 13118, "french</w>": 3461, "frenzy</w>": 30084, "frequ": 9211, "frequencies</w>": 45319, "frequency</w>": 18825, "frequent</w>": 19836, "frequently</w>": 22434, "fresco</w>": 31609, "fresh": 4065, "fresh</w>": 2975, "fresher</w>": 49284, "freshers</w>": 35810, "freshest</w>": 46809, "freshly</w>": 16081, "freshman</w>": 9381, "freshmen</w>": 21292, "freshness</w>": 45872, "freshwater</w>": 24803, "fresno": 40879, "fresno</w>": 20995, "fret</w>": 40510, "freud</w>": 40787, "frey": 22136, "frey</w>": 9082, "fri": 815, "fri</w>": 6882, "friars</w>": 30513, "fric": 18981, "frick</w>": 46304, "friction</w>": 38563, "frid</w>": 46388, "frida</w>": 36001, "friday": 6350, "friday</w>": 1461, "fridayfeeling</w>": 11952, "fridaymotivation</w>": 38544, "fridaynight": 44858, "fridayreads</w>": 37736, "fridays</w>": 15589, "fridaythe</w>": 47642, "fridge</w>": 13491, "fridges</w>": 40734, "frie": 36999, "fried": 13743, "fried</w>": 7310, "friedman</w>": 29402, "friedrich</w>": 34171, "friend": 3017, "friend</w>": 1625, "friendly": 44612, "friendly</w>": 4681, "friends": 38875, "friends</w>": 1574, "friendship": 42674, "friendship</w>": 7679, "friendships</w>": 28840, "fries</w>": 11369, "frifotos</w>": 40493, "friger": 20785, "friggin</w>": 48300, "frigh": 34831, "fright": 24277, "fright</w>": 40207, "frightened</w>": 47136, "frightening</w>": 39290, "fringe</w>": 10640, "fris": 37252, "frisbee</w>": 45768, "frisco</w>": 35945, "frit": 34614, "fritz</w>": 29860, "friyay</w>": 38887, "frm</w>": 12951, "fro": 626, "fro</w>": 26603, "frock</w>": 45306, "frog": 26494, "frog</w>": 11438, "frogs</w>": 20781, "from": 8330, "from</w>": 633, "frome</w>": 48691, "fromhome</w>": 41477, "fromthe": 18756, "fron": 1847, "fron</w>": 18036, "front": 10996, "front</w>": 2184, "frontal</w>": 35794, "frontier</w>": 18253, "frontiers</w>": 38396, "frontline</w>": 29589, "frontman</w>": 36775, "fronts</w>": 26846, "froome</w>": 48560, "frosh</w>": 47069, "frost": 39420, "frost</w>": 11619, "frosted</w>": 35988, "frosting</w>": 33872, "frosty</w>": 22760, "froze</w>": 47788, "frozen": 42464, "frozen</w>": 8507, "frs</w>": 26216, "fru": 3248, "fruit": 16771, "fruit</w>": 5190, "fruitful</w>": 31494, "fruits</w>": 13282, "fruity</w>": 22320, "frustr": 16046, "frustrated</w>": 25111, "frustrating</w>": 31342, "frustration</w>": 30535, "fry": 33914, "fry</w>": 13686, "fryer</w>": 49217, "frying</w>": 38516, "fs": 23699, "fs</w>": 3854, "fsa</w>": 33373, "fsu": 44185, "fsu</w>": 19317, "ft": 3391, "ft</w>": 981, "fta</w>": 41975, "ftc</w>": 33752, "fted</w>": 5612, "fter</w>": 25063, "fthe": 22886, "ftheday</w>": 9823, "fting</w>": 6174, "fton</w>": 26605, "ftp</w>": 42649, "fts</w>": 3767, "ftse</w>": 46717, "ftw</w>": 19298, "fty</w>": 17494, "fu": 665, "fu</w>": 9098, "fuch": 42617, "fudge</w>": 24270, "fue</w>": 43723, "fuego</w>": 41500, "fuel": 21113, "fuel</w>": 5945, "fueled</w>": 28792, "fueling</w>": 38793, "fuelled</w>": 48357, "fuels</w>": 19365, "fuentes</w>": 44393, "fuer": 29645, "fug": 29227, "fugitive</w>": 39257, "fuji": 15573, "fuji</w>": 21634, "fujifilm</w>": 
24765, "fuk</w>": 31051, "fuku": 20728, "fukushima</w>": 33929, "ful": 1814, "ful</w>": 857, "fulbright</w>": 41834, "fulfill": 43675, "fulfill</w>": 27467, "fulfilled</w>": 29919, "fulfilling</w>": 30621, "fulfillment</w>": 45573, "fulham</w>": 25574, "full": 9407, "full</w>": 1476, "fuller</w>": 20225, "fullerton</w>": 42822, "fullest</w>": 35603, "fully": 39142, "fully</w>": 2401, "fulness</w>": 10526, "fuls</w>": 41606, "fulton</w>": 26725, "fum</w>": 38393, "fumble</w>": 49373, "fun": 1229, "fun</w>": 1499, "func": 8679, "function</w>": 8093, "functional</w>": 12885, "functionality</w>": 33316, "functioning</w>": 25479, "functions</w>": 18001, "fund": 19089, "fund</w>": 4877, "fundam": 11670, "fundament": 18852, "fundamental</w>": 17627, "fundamentally</w>": 45378, "fundamentals</w>": 27887, "funday</w>": 15439, "funded</w>": 10588, "funding</w>": 5588, "fundra": 6201, "fundraiser</w>": 10049, "fundraising</w>": 10755, "funds</w>": 7066, "funer": 40693, "funeral</w>": 10606, "funfact</w>": 31596, "funfactfriday</w>": 40710, "fungal</w>": 38838, "fungi</w>": 27837, "fungus</w>": 30677, "funk": 37353, "funk</w>": 13372, "funko": 49402, "funko</w>": 23697, "funky</w>": 16492, "funnel</w>": 27862, "funnier</w>": 42232, "funniest</w>": 15557, "funny": 19124, "funny</w>": 3789, "funrun</w>": 34185, "fur": 2395, "fur</w>": 9686, "furi": 40816, "furious</w>": 17522, "furman</w>": 49238, "furn": 21348, "furnace</w>": 31913, "furnished</w>": 37388, "furnitu": 45696, "furniture</w>": 7993, "furry": 33414, "furry</w>": 15351, "fursuit": 25306, "fursuit</w>": 43083, "fursuitfriday</w>": 27917, "further</w>": 5583, "fury</w>": 14404, "fus</w>": 18419, "fuse</w>": 23386, "fused</w>": 38994, "fusion": 44661, "fusion</w>": 9364, "fuss</w>": 26331, "fut": 21460, "fut</w>": 34049, "futbol</w>": 33014, "futsal</w>": 20558, "futu": 33454, "futur": 38840, "future": 7959, "future</w>": 1904, "futureof": 22599, "futureofwork</w>": 33202, "futures</w>": 13488, "futuri": 19068, "futurism</w>": 48435, "futurist</w>": 48086, "futuristic</w>": 30987, "fuzz": 47128, "fuzz</w>": 40443, "fuzzy</w>": 25876, "fv": 29795, "fw": 23934, "fw</w>": 5277, "fwd</w>": 27052, "fx": 17807, "fx</w>": 9025, "fy": 8440, "fy</w>": 2702, "fyi</w>": 16014, "fying</w>": 5294, "fz</w>": 46400, "fé": 34072, "g": 70, "g</w>": 326, "ga": 1275, "ga</w>": 1531, "gaa</w>": 10715, "gaal</w>": 40867, "gaard</w>": 24645, "gab": 3927, "gab</w>": 37382, "gabbana</w>": 36272, "gabby": 48115, "gabby</w>": 24567, "gabe</w>": 18916, "gabi</w>": 41931, "gable</w>": 33387, "gables</w>": 40928, "gabri": 8311, "gabriel": 31684, "gabriel</w>": 13244, "gabrielle</w>": 33572, "gaby</w>": 46420, "gac</w>": 32520, "gad": 7786, "gad</w>": 44651, "gadget</w>": 25525, "gadgets</w>": 22840, "gado</w>": 29489, "gae</w>": 22003, "gael</w>": 35663, "gaelic</w>": 31173, "gaf": 21354, "gaf</w>": 32670, "gag": 14121, "gag</w>": 18844, "gaga</w>": 9782, "gage</w>": 21081, "gah</w>": 27750, "gai": 24214, "gai</w>": 25153, "gaia</w>": 41269, "gail": 41160, "gail</w>": 27676, "gain": 21536, "gain</w>": 6202, "gaine": 35747, "gained</w>": 14489, "gaines</w>": 49225, "gainesville</w>": 40427, "gaining</w>": 15260, "gains": 42751, "gains</w>": 12107, "gal": 2001, "gal</w>": 4488, "gala</w>": 7211, "galac": 18864, "galactic</w>": 25514, "galap": 41115, "galapagos</w>": 44057, "galat": 39853, "galatasar": 42413, "galatasaray</w>": 47787, "galax": 5647, "galaxies</w>": 32435, "galaxy": 32130, "galaxy</w>": 6545, "gale": 37658, "gale</w>": 21380, "galerie</w>": 44539, 
"gales</w>": 48633, "gali": 17546, "gali</w>": 30552, "galicia</w>": 47927, "galileo</w>": 39671, "gall": 3011, "gall</w>": 33374, "galla": 16847, "gallagher</w>": 19168, "galleria</w>": 40656, "galleries</w>": 22304, "gallery": 36648, "gallery</w>": 3830, "galley</w>": 48917, "galli": 22568, "gallipoli</w>": 47249, "gallo": 37350, "gallo</w>": 33265, "gallon</w>": 24615, "gallons</w>": 29335, "galloway</w>": 27796, "galore</w>": 22286, "gals</w>": 20125, "galvani": 46046, "galve": 34328, "galveston</w>": 36003, "galway": 38045, "galway</w>": 17112, "gam": 1162, "gam</w>": 34195, "gama</w>": 35873, "gambia</w>": 32988, "gamble</w>": 26121, "gambling</w>": 20287, "game": 2882, "game</w>": 1063, "gameart</w>": 31490, "gameboy</w>": 40951, "gamecube</w>": 44079, "gameday</w>": 9241, "gamedev</w>": 7544, "gameinsight</w>": 42626, "gameof": 10987, "gameofthrones</w>": 11822, "gameon</w>": 47691, "gameplay</w>": 16794, "gamer": 12595, "gamer</w>": 11598, "gamergate</w>": 25961, "gamers</w>": 16166, "gamersunite</w>": 26423, "games": 18551, "games</w>": 1955, "gamescom</w>": 37003, "gamestop</w>": 39436, "gametime</w>": 45899, "gami</w>": 42025, "gamification</w>": 48908, "gaming": 28803, "gaming</w>": 4017, "gamma</w>": 22180, "gamo</w>": 39325, "gan": 1822, "gan</w>": 1670, "gand": 8399, "ganda</w>": 27261, "gander</w>": 44508, "gandhi</w>": 12322, "ganesh</w>": 30362, "ganesha</w>": 45185, "gang": 8066, "gang</w>": 5674, "ganga</w>": 36275, "gangnam</w>": 46777, "gangs</w>": 29844, "gangsta</w>": 37365, "gangster</w>": 26514, "gani</w>": 48324, "gann</w>": 45665, "gannon</w>": 45837, "gano</w>": 25304, "gao</w>": 26556, "gaon</w>": 19279, "gap": 29906, "gap</w>": 7609, "gaps</w>": 25296, "gar": 1099, "gar</w>": 5824, "gara</w>": 28710, "garage</w>": 8474, "garbage</w>": 13760, "garci": 44658, "garcia</w>": 10529, "gard": 7751, "gard</w>": 21003, "garda</w>": 31906, "garde</w>": 22649, "garden": 4674, "garden</w>": 2756, "gardenchat</w>": 46292, "gardener</w>": 28554, "gardeners</w>": 38205, "gardening</w>": 10483, "gardens</w>": 6152, "gardiner</w>": 43121, "gardner</w>": 18710, "gare": 5633, "gare</w>": 48402, "gareth": 37140, "gareth</w>": 18175, "garfield</w>": 26728, "garh</w>": 16762, "gari": 40898, "gari</w>": 43080, "garis</w>": 37839, "garland</w>": 23418, "garlic</w>": 9685, "garment</w>": 31418, "garments</w>": 43341, "garmin</w>": 39885, "garner</w>": 20340, "garnet</w>": 37669, "garo</w>": 30388, "garrett</w>": 15881, "garri": 21764, "garrison</w>": 30108, "garros</w>": 40425, "garry</w>": 24398, "gars</w>": 12055, "gart": 18380, "gart</w>": 18751, "garten</w>": 14684, "garter</w>": 48420, "garth": 45398, "garth</w>": 24469, "gartner": 43334, "gartner</w>": 29678, "garty</w>": 46383, "garu</w>": 31140, "garvey</w>": 39511, "garwal</w>": 38623, "gary": 10535, "gary</w>": 4516, "garza</w>": 49393, "gas": 5047, "gas</w>": 2474, "gases</w>": 36971, "gasoline</w>": 27691, "gasp</w>": 43762, "gaston</w>": 40669, "gastri": 49197, "gastro": 23740, "gastron": 30699, "gastronomy</w>": 46987, "gat": 5314, "gat</w>": 18941, "gata</w>": 44575, "gate": 8071, "gate</w>": 3302, "gated</w>": 23997, "gates</w>": 9472, "gateshead</w>": 40051, "gateway": 45221, "gateway</w>": 14943, "gather": 36345, "gather</w>": 12602, "gathered</w>": 14646, "gathering</w>": 9197, "gatherings</w>": 48096, "gathers</w>": 39250, "gating</w>": 27561, "gation</w>": 11095, "gations</w>": 33906, "gato</w>": 44492, "gator": 20216, "gator</w>": 16390, "gatorade</w>": 36354, "gators</w>": 17173, "gatory</w>": 24796, 
"gatsby</w>": 32586, "gatwick</w>": 37122, "gau": 5919, "gau</w>": 43068, "gauge</w>": 18728, "gaunt": 31862, "gauntlet</w>": 37163, "gautam": 45853, "gautam</w>": 31356, "gauteng</w>": 40333, "gav": 8966, "gave</w>": 3485, "gavin": 32974, "gavin</w>": 16389, "gaw": 15405, "gawd</w>": 43239, "gawx</w>": 43420, "gay": 7460, "gay</w>": 5627, "gaya</w>": 39477, "gaye</w>": 41401, "gayle</w>": 29998, "gayo</w>": 36768, "gays</w>": 28001, "gaz": 4837, "gaz</w>": 36475, "gaza": 38391, "gaza</w>": 10112, "gazaunderattack</w>": 42458, "gaze</w>": 23212, "gazette</w>": 20443, "gazing</w>": 28373, "gb": 8727, "gb</w>": 4619, "gba</w>": 18528, "gbbo</w>": 34474, "gbc</w>": 42993, "gbp</w>": 27391, "gbr</w>": 31984, "gby</w>": 40509, "gc": 8577, "gc</w>": 6043, "gcc</w>": 26804, "gcse</w>": 28763, "gcu</w>": 34137, "gd": 13264, "gd</w>": 14604, "gdc</w>": 32793, "gden</w>": 44928, "gdp</w>": 17100, "gdpr</w>": 22963, "ge": 619, "ge</w>": 710, "gea</w>": 26790, "gear": 15532, "gear</w>": 4802, "gearbox</w>": 42454, "geared</w>": 33903, "gearing</w>": 19027, "gears</w>": 21147, "geaux": 36313, "gecko</w>": 38616, "ged": 17252, "ged</w>": 3480, "geddon</w>": 31720, "gedly</w>": 13991, "gee": 9806, "gee</w>": 9071, "geek": 17920, "geek</w>": 7135, "geeks</w>": 20110, "geeky</w>": 47332, "geel": 25906, "geelong</w>": 34555, "gees</w>": 38088, "geese</w>": 26413, "geez</w>": 42394, "geh": 30320, "geist</w>": 38290, "gel": 7343, "gel</w>": 5697, "gelato</w>": 29577, "gels</w>": 42552, "gely</w>": 14637, "gem": 14261, "gem</w>": 7613, "gement</w>": 19495, "gemini</w>": 23086, "gemma</w>": 23952, "gems</w>": 14355, "gemstone</w>": 27747, "gemstones</w>": 43972, "gen": 1024, "gen</w>": 3278, "gence</w>": 16088, "gency</w>": 5245, "gend": 33247, "gender": 22976, "gender</w>": 5906, "gendere": 35824, "genderequality</w>": 43338, "gene": 5822, "gene</w>": 7962, "genealo": 24142, "genealogy</w>": 29381, "gener": 1832, "general": 20576, "general</w>": 3658, "generally</w>": 19256, "generals</w>": 30296, "generate</w>": 16896, "generated</w>": 19450, "generates</w>": 33938, "generating</w>": 23882, "generation": 41211, "generation</w>": 4883, "generational</w>": 34506, "generations</w>": 12247, "generative</w>": 29472, "generator</w>": 19399, "generators</w>": 41917, "generic</w>": 26978, "generosity</w>": 23015, "generous</w>": 12570, "generously</w>": 35113, "genes</w>": 19683, "genesis</w>": 13518, "genetic": 47746, "genetic</w>": 13578, "genetically</w>": 36745, "genetics</w>": 18276, "geneva</w>": 14799, "genevie": 41633, "genevieve</w>": 46584, "geni": 22334, "genic</w>": 15750, "genie</w>": 24221, "genital</w>": 32960, "genius</w>": 8235, "geniuses</w>": 41406, "geno": 41544, "geno</w>": 46776, "genoa</w>": 43993, "genoci": 14687, "genocide</w>": 15903, "genome</w>": 23991, "genomic</w>": 44371, "genomics</w>": 26227, "genre</w>": 14249, "genres</w>": 30340, "gens</w>": 17449, "gent": 3685, "gent</w>": 7139, "gente</w>": 34325, "gentle": 7262, "gentle</w>": 13577, "gentleman</w>": 13293, "gentlemen</w>": 11692, "gently</w>": 17187, "gento</w>": 28320, "gentri": 41148, "gentry</w>": 47225, "gents</w>": 18862, "genu": 9182, "genuine</w>": 12184, "genuinely</w>": 20006, "genus</w>": 38161, "geny</w>": 35323, "geo": 5038, "geo</w>": 11604, "geocaching</w>": 47908, "geof": 20629, "geoff": 33697, "geoff</w>": 20386, "geoffrey</w>": 29520, "geograph": 45920, "geographic</w>": 22635, "geographical</w>": 39380, "geography</w>": 17101, "geological</w>": 38380, "geology</w>": 21578, "geom": 46135, "geome": 12958, 
"geometric</w>": 22419, "geometry</w>": 21731, "geon": 20844, "geon</w>": 7295, "geons</w>": 15914, "geopol": 39758, "geor": 2549, "georg</w>": 43126, "george": 8377, "george</w>": 3296, "georges</w>": 25042, "georgetown</w>": 22970, "georgie</w>": 42115, "georgina</w>": 43892, "geospatial</w>": 46238, "geothermal</w>": 38413, "geous</w>": 3068, "ger": 1291, "ger</w>": 1502, "gera</w>": 48867, "gerald": 29901, "gerald</w>": 13269, "gerard": 35979, "gerard</w>": 20826, "gerber</w>": 45058, "gered</w>": 40179, "geri": 41664, "geri</w>": 46214, "gering</w>": 24077, "germain</w>": 38786, "german": 14972, "german</w>": 4710, "germans</w>": 28400, "germany</w>": 4464, "germin": 44721, "germs</w>": 47731, "geronimo</w>": 45171, "gerrard</w>": 26538, "gerry": 29825, "gerry</w>": 23026, "gers</w>": 3314, "gertrude</w>": 46950, "gervais</w>": 36527, "gery</w>": 32845, "ges</w>": 3316, "gest": 11843, "gest</w>": 2033, "gesture</w>": 21780, "gestures</w>": 43524, "get": 5670, "get</w>": 779, "geta": 13155, "getaway</w>": 16131, "gether</w>": 27224, "getic</w>": 20661, "getin": 25822, "getit": 44891, "getit</w>": 48315, "getoutside</w>": 35644, "gets": 39448, "gets</w>": 2127, "gett": 6647, "gett</w>": 27965, "gettable</w>": 15620, "gette</w>": 29800, "gettin</w>": 13428, "getting": 30885, "getting</w>": 1500, "getty": 31185, "getty</w>": 13965, "gettys": 35189, "gettysburg</w>": 37062, "getyour": 42159, "gey</w>": 29289, "gf": 28953, "gf</w>": 10846, "gfriend</w>": 35245, "gfs</w>": 37553, "gg": 1129, "gg</w>": 3286, "gga</w>": 26003, "ggan</w>": 25626, "gge": 21521, "gge</w>": 31659, "gged</w>": 6095, "gger": 12367, "gger</w>": 3493, "ggers</w>": 7480, "ggg</w>": 20143, "gggg</w>": 33513, "ggi</w>": 21662, "ggin</w>": 17160, "gging</w>": 4966, "ggins</w>": 12444, "ggle": 34981, "ggle</w>": 11430, "ggled</w>": 46328, "ggles</w>": 14703, "ggling</w>": 16523, "ggly</w>": 39407, "ggs</w>": 4797, "ggy": 24935, "ggy</w>": 6476, "gh": 583, "gh</w>": 790, "gha": 10010, "gha</w>": 25183, "gham": 21456, "ghan": 18945, "ghan</w>": 6624, "ghana": 30330, "ghana</w>": 9731, "ghanaian</w>": 34223, "ghani</w>": 36699, "ghar": 37334, "ghar</w>": 36973, "ghat</w>": 43989, "ghaz": 37493, "ghc</w>": 42139, "ghe": 10754, "ghe</w>": 28561, "ghead</w>": 40783, "ghee</w>": 34794, "gher": 21542, "gher</w>": 14796, "ghet": 18447, "ghetti</w>": 17485, "ghetto</w>": 22403, "ghi": 22436, "ghi</w>": 22279, "ghibli</w>": 40555, "ghj": 38439, "ghlin</w>": 24131, "gho": 4307, "ghorn</w>": 38094, "ghosh</w>": 43279, "ghoshal</w>": 49134, "ghost": 11417, "ghost</w>": 7108, "ghostbusters</w>": 25462, "ghostly</w>": 44901, "ghosts</w>": 16737, "ghou": 35843, "ghoul</w>": 45302, "ghouse</w>": 38238, "ghs</w>": 14157, "ght": 1413, "ght</w>": 630, "ghted</w>": 4963, "ghter</w>": 2427, "ghters</w>": 12994, "ghtful</w>": 8334, "ghting</w>": 3019, "ghtly</w>": 6993, "ghtning</w>": 39740, "ghton</w>": 16353, "ghts</w>": 1259, "ghty": 20968, "ghty</w>": 5866, "ghu": 25808, "ghue</w>": 45675, "ghyun</w>": 25010, "ghz</w>": 24325, "gi": 707, "gi</w>": 4478, "gia</w>": 8864, "giac": 35444, "giam": 39623, "gian": 17274, "gian</w>": 12866, "gianni</w>": 46752, "giant": 23668, "giant</w>": 4687, "giants</w>": 7076, "giar": 34241, "gib": 9816, "gibb": 18964, "gibbons</w>": 31974, "gibbs</w>": 26488, "gibility</w>": 33297, "gible</w>": 13159, "gibr": 20206, "gibraltar</w>": 23988, "gibson": 37420, "gibson</w>": 12178, "gic": 27900, "gic</w>": 2570, "gical</w>": 32973, "gically</w>": 26320, "gid": 36774, "gid</w>": 21413, "giddy</w>": 40894, 
"gideon</w>": 43867, "gidi</w>": 30603, "gie": 11459, "gie</w>": 3991, "gier</w>": 28974, "gies</w>": 5505, "gif": 11363, "gif</w>": 11677, "gifford</w>": 47850, "gifs</w>": 37643, "gift": 20569, "gift</w>": 2733, "gifted</w>": 15110, "giftide": 20152, "giftideas</w>": 23487, "gifting</w>": 39546, "gifts</w>": 5836, "gig": 26981, "gig</w>": 7471, "gigab": 34530, "gigan": 24104, "gigantic</w>": 31507, "giggle</w>": 36426, "giggles</w>": 42731, "giggs</w>": 44692, "gigi": 44106, "gigi</w>": 26171, "gigs</w>": 20316, "gil": 3997, "gil</w>": 10088, "gila</w>": 46952, "gilbert</w>": 14154, "gilded</w>": 44341, "giles</w>": 24802, "gill": 14280, "gill</w>": 12003, "gille": 29610, "gilles</w>": 39590, "gillespie</w>": 36242, "gillette</w>": 38603, "gilli": 13695, "gillian</w>": 28753, "gills</w>": 48851, "gilmore</w>": 27603, "gilt</w>": 44378, "gim": 31284, "gimm": 40692, "gimme</w>": 21525, "gin": 3374, "gin</w>": 4941, "gina</w>": 15604, "gine": 27482, "ging": 10829, "ging</w>": 3905, "ginger": 16287, "ginger</w>": 9718, "gingerbread</w>": 23692, "gini</w>": 35768, "gino</w>": 36521, "gins</w>": 18328, "gio": 16329, "gio</w>": 8050, "gion</w>": 41226, "gior": 14920, "giorgio</w>": 33271, "giorno</w>": 33310, "gios</w>": 41927, "gious</w>": 14419, "giov": 21404, "giovanni</w>": 26574, "gipp": 41351, "gir": 1077, "gir</w>": 25481, "gira": 16949, "giraffe</w>": 22826, "giri</w>": 31709, "girl": 3914, "girl</w>": 1611, "girlfriend</w>": 8217, "girlfriends</w>": 30736, "girlpower</w>": 37433, "girls": 15480, "girls</w>": 1917, "girly</w>": 29605, "giro": 39664, "giro</w>": 26454, "girona</w>": 47842, "giroud</w>": 41177, "gis": 16266, "gis</w>": 12773, "gist</w>": 21241, "git": 16060, "git</w>": 20918, "gita</w>": 40838, "github</w>": 31196, "giu": 17931, "giuli": 29762, "giuliani</w>": 47739, "giuse": 29385, "giuseppe</w>": 33563, "give": 4120, "give</w>": 1781, "giveaway</w>": 5310, "giveaways</w>": 18974, "giveback</w>": 41385, "given": 33323, "given</w>": 4302, "givenchy</w>": 38245, "giver</w>": 43339, "gives</w>": 3926, "giveup</w>": 35485, "giving": 14673, "giving</w>": 2339, "givingback</w>": 49300, "givingtuesday</w>": 23556, "giz": 29237, "gk": 38953, "gk</w>": 18719, "gl": 1849, "gl</w>": 14751, "gla": 1523, "gla</w>": 36904, "glaci": 14924, "glacial</w>": 40782, "glacier</w>": 19282, "glaciers</w>": 42528, "glad": 20841, "glad</w>": 4761, "glades</w>": 37432, "gladi": 21742, "gladiator</w>": 38477, "gladiators</w>": 41087, "gladly</w>": 41598, "gladys</w>": 43168, "glam": 8738, "glam</w>": 16905, "glamorous</w>": 22896, "glamour": 42876, "glamour</w>": 17499, "glamping</w>": 46167, "glan": 40482, "glan</w>": 45844, "glance</w>": 26557, "gland</w>": 41441, "glar": 48535, "glar</w>": 41702, "glare</w>": 46035, "glas": 29935, "glas</w>": 43654, "glasgo": 6757, "glasgow": 29990, "glasgow</w>": 7363, "glass": 16305, "glass</w>": 3313, "glasses</w>": 6116, "glaston": 26848, "glastonbury</w>": 28233, "glau": 39171, "glaze</w>": 28112, "glazed</w>": 24122, "gle": 7166, "gle</w>": 2865, "glee": 32379, "glee</w>": 21614, "glen": 6158, "glen</w>": 11049, "glend": 38332, "glendale</w>": 33043, "glenn": 32004, "glenn</w>": 12861, "gler</w>": 34649, "gley</w>": 21998, "gli": 5896, "gli</w>": 28791, "glia</w>": 22217, "glide</w>": 37321, "glider</w>": 41636, "glimp": 12888, "glimpse</w>": 13817, "glio</w>": 29785, "glit": 21079, "glitch</w>": 29563, "glitter</w>": 16528, "glitz</w>": 44542, "glo": 1721, "glo</w>": 30474, "glob": 13363, "global": 6707, "global</w>": 2779, "globalgoals</w>": 33211, 
"globalhealth</w>": 46751, "globalization</w>": 47680, "globally</w>": 17775, "globalwarming</w>": 46017, "globe": 19436, "globe</w>": 9368, "globes</w>": 38085, "glock</w>": 38818, "glomer": 43689, "gloom</w>": 48594, "gloomy</w>": 32199, "glori": 7270, "gloria</w>": 19244, "glorious</w>": 9171, "glory": 36107, "glory</w>": 7285, "glos</w>": 40633, "gloss": 38258, "gloss</w>": 22014, "glossy</w>": 29802, "glou": 15989, "gloucester": 28133, "gloucester</w>": 23835, "gloucestershire</w>": 33789, "glove</w>": 16078, "glover</w>": 21594, "gloves</w>": 12363, "glow": 30472, "glow</w>": 10111, "glowing</w>": 18437, "glows</w>": 48107, "glu": 5952, "glu</w>": 32281, "glucose</w>": 34642, "glue</w>": 22103, "glued</w>": 38135, "gluten": 15482, "gluten</w>": 15524, "glutenfree</w>": 16138, "gly": 13027, "glycer": 48914, "gm": 18743, "gm</w>": 5918, "gma</w>": 18155, "gmail</w>": 11119, "gman": 41043, "gman</w>": 36936, "gmb": 35934, "gmb</w>": 31799, "gmbh</w>": 46877, "gmc</w>": 27257, "gmo</w>": 23486, "gms</w>": 36987, "gmt</w>": 13803, "gn": 2455, "gn</w>": 9831, "gna</w>": 23009, "gnation</w>": 45912, "gne</w>": 25407, "gni": 5104, "gnment</w>": 25110, "gno": 23376, "gno</w>": 43686, "gnocchi</w>": 48299, "gnome</w>": 33643, "gnon</w>": 20561, "go": 650, "go</w>": 861, "goa</w>": 14399, "goal": 9003, "goal</w>": 3321, "goalie</w>": 20723, "goalkeeper</w>": 16601, "goals</w>": 3295, "goalscorer</w>": 43547, "goaltender</w>": 44151, "goat": 34082, "goat</w>": 9530, "goats</w>": 18393, "gob": 29559, "gobeavs</w>": 48285, "goblin</w>": 26223, "goblue</w>": 25232, "gobucks</w>": 29175, "gocougs</w>": 34202, "god": 4190, "god</w>": 1731, "godawgs</w>": 40436, "godbless": 46616, "godbless</w>": 44007, "godd": 16589, "goddamn</w>": 28495, "goddard</w>": 37827, "goddess</w>": 10808, "godfather</w>": 26222, "godfrey</w>": 40148, "godis": 38521, "godly</w>": 42438, "gods": 33620, "gods</w>": 10328, "goducks</w>": 35889, "godzilla</w>": 23369, "goe": 22084, "goers</w>": 27784, "goes": 43581, "goes</w>": 2635, "gof": 17537, "goff</w>": 34399, "goftheday</w>": 39360, "gofund": 34445, "gofundme</w>": 34686, "gog</w>": 42949, "goggles</w>": 31027, "gogh</w>": 19697, "gogo</w>": 22688, "gogreen</w>": 36279, "gohawks</w>": 34884, "goi</w>": 24917, "goin</w>": 13939, "going": 25787, "going</w>": 1245, "goku</w>": 29550, "gol": 1537, "gol</w>": 18257, "gola</w>": 41090, "gold": 4999, "gold</w>": 2209, "goldberg</w>": 25161, "goldcoast</w>": 34634, "golden": 10763, "golden</w>": 3878, "goldeng": 20650, "goldenglobes</w>": 26842, "goldfish</w>": 40293, "goldie</w>": 42805, "goldman</w>": 27164, "golds": 30526, "golds</w>": 40283, "goldsmith</w>": 40214, "gole": 41297, "golf": 9096, "golf</w>": 3096, "golfclub</w>": 45742, "golfer</w>": 24579, "golfers</w>": 28441, "golfing</w>": 31379, "goli": 29265, "goliath</w>": 41602, "gom": 7051, "goma</w>": 46198, "gomes</w>": 39128, "gomez</w>": 16433, "gon": 1854, "gon</w>": 3379, "gona</w>": 34835, "gone": 35135, "gone</w>": 3601, "gong</w>": 28486, "gonna</w>": 2562, "gonz": 10587, "gonzaga</w>": 36241, "gonzale": 17512, "gonzales</w>": 31265, "gonzalez</w>": 18198, "goo": 1381, "goo</w>": 17882, "good": 2185, "good</w>": 886, "goodbye</w>": 6968, "goodday": 46284, "goode</w>": 42076, "goodfood</w>": 46844, "goodfriday</w>": 40360, "goodie</w>": 29213, "goodies</w>": 13308, "goodluck</w>": 19718, "goodman</w>": 24146, "goodmorning</w>": 14421, "goodness</w>": 10531, "goodnight</w>": 8540, "goodreads</w>": 31629, "goods</w>": 9340, "goodtimes</w>": 22570, "goodvibes</w>": 
43146, "goodwill</w>": 24902, "goodwin</w>": 28080, "goodwood</w>": 30008, "goody</w>": 35937, "goodyear</w>": 42858, "goofy</w>": 26879, "goog": 18581, "google": 12195, "google</w>": 3460, "googled</w>": 40345, "googleplay</w>": 37309, "goon</w>": 15267, "goons</w>": 30440, "goooo": 35876, "goooo</w>": 48957, "goose": 21445, "goose</w>": 13822, "goosebumps</w>": 32254, "gop": 18942, "gop</w>": 6250, "gopack": 46995, "gopackgo</w>": 47719, "gopal</w>": 47268, "gopdebate</w>": 39806, "gopher": 47750, "gopher</w>": 48905, "gophers</w>": 31957, "gopro</w>": 17511, "gor": 1747, "gor</w>": 29827, "gordo</w>": 47707, "gordon": 20485, "gordon</w>": 8244, "gore": 30311, "gore</w>": 17872, "gorg</w>": 46815, "gorge": 35548, "gorge</w>": 20038, "gorgeous</w>": 3241, "gori": 12461, "goria</w>": 43359, "gorilla": 37910, "gorilla</w>": 21994, "gorman</w>": 35741, "goro</w>": 44977, "gory</w>": 7160, "gos": 20517, "gos</w>": 5693, "gosh</w>": 15395, "gosling</w>": 35320, "gosp": 9617, "gospel</w>": 11313, "goss": 39734, "goss</w>": 36924, "gossi": 15684, "gossip</w>": 18963, "got": 10125, "got</w>": 1005, "gota</w>": 36693, "gotcha</w>": 43275, "gote</w>": 49345, "goth": 48465, "goth</w>": 20437, "gotham": 46123, "gotham</w>": 18299, "gothic</w>": 15426, "goti": 9497, "goto": 39715, "gots</w>": 35215, "gott": 5089, "gott</w>": 36466, "gotta</w>": 4633, "gotten</w>": 5889, "gotti</w>": 41881, "gotv</w>": 36089, "gou": 10520, "gou</w>": 36555, "gouache</w>": 43314, "goul": 33187, "gould</w>": 31087, "gour": 13580, "gourmet</w>": 19111, "gov": 4022, "gov</w>": 4564, "gove</w>": 36997, "govegan</w>": 38886, "gover": 10471, "gover</w>": 16759, "govern": 2351, "govern</w>": 32404, "governance</w>": 13386, "governing</w>": 30946, "government</w>": 3149, "governmental</w>": 42609, "governments</w>": 19582, "governor": 17459, "governor</w>": 6630, "governors</w>": 26881, "govin": 42451, "govt</w>": 5345, "govuk</w>": 28830, "gow": 21885, "gow</w>": 33788, "gowan</w>": 31307, "gower</w>": 43448, "gown</w>": 13719, "gowns</w>": 38029, "goyal</w>": 35105, "gp": 19329, "gp</w>": 5051, "gpa</w>": 24098, "gps</w>": 13639, "gpu</w>": 38561, "gq": 40286, "gq</w>": 31324, "gr": 709, "gr</w>": 6062, "gra": 782, "gra</w>": 15276, "grab</w>": 4646, "grabbed</w>": 22856, "grabbing</w>": 26440, "grabs</w>": 17076, "grac": 11323, "grace": 13225, "grace</w>": 5142, "graced</w>": 31894, "graceful</w>": 25242, "graces</w>": 38629, "graci": 11174, "gracias</w>": 16463, "gracie</w>": 23235, "gracing</w>": 37263, "gracious</w>": 29044, "grad": 19869, "grad</w>": 7291, "gradable</w>": 41529, "grade": 45435, "grade</w>": 3394, "graded</w>": 13823, "grader</w>": 23930, "graders</w>": 10930, "grades</w>": 10838, "gradient</w>": 36885, "grading</w>": 19016, "grads</w>": 17811, "gradu": 3230, "gradual</w>": 45210, "gradually</w>": 32192, "graduate</w>": 6675, "graduated</w>": 15128, "graduates</w>": 12236, "graduating</w>": 14819, "graduation</w>": 8060, "grady</w>": 33980, "graeme</w>": 30192, "graf": 46478, "graf</w>": 39765, "graff": 10656, "graffiti</w>": 11676, "graft</w>": 32698, "grafton</w>": 47347, "graham": 19805, "graham</w>": 7711, "grail</w>": 37184, "grain": 44003, "grain</w>": 12109, "grains</w>": 25791, "gral</w>": 25631, "gram": 2949, "gram</w>": 2338, "grammar</w>": 16077, "grammy</w>": 15388, "grammys</w>": 18121, "grams</w>": 6294, "gran": 3892, "gran</w>": 14493, "granada</w>": 31172, "grand": 3058, "grand</w>": 2991, "grandad</w>": 29148, "grandchildren</w>": 36856, "granddaughter</w>": 29460, "grande": 37514, 
"grande</w>": 10757, "grandes</w>": 36382, "grandfather</w>": 15346, "grandma</w>": 10525, "grandmother</w>": 17469, "grandpa</w>": 14582, "grandparents</w>": 21311, "grandprix</w>": 39358, "grandson</w>": 20766, "grandstand</w>": 43172, "grange": 45027, "grange</w>": 23850, "granger</w>": 42968, "granite</w>": 18813, "grann</w>": 45585, "granny</w>": 22710, "granola</w>": 34271, "grant": 18682, "grant</w>": 5442, "granted</w>": 14156, "granth": 41283, "grants</w>": 15123, "grape": 19131, "grape</w>": 15959, "grapefruit</w>": 28347, "grapes</w>": 18580, "grapevine</w>": 47619, "graph": 1349, "graph</w>": 4407, "graphene</w>": 38387, "grapher</w>": 14987, "graphers</w>": 32088, "graphic": 15653, "graphic</w>": 4245, "graphical</w>": 20878, "graphicdesign</w>": 21907, "graphics</w>": 9492, "graphies</w>": 40164, "graphite</w>": 29447, "graphs</w>": 24670, "graphy</w>": 4897, "grapp": 30843, "gras": 31517, "gras</w>": 17584, "grasp</w>": 34975, "grass": 11584, "grass</w>": 5922, "grasses</w>": 46807, "grasshopper</w>": 48894, "grassi</w>": 42294, "grasso</w>": 34808, "grassroots</w>": 21991, "grassy</w>": 44140, "grat": 9221, "grate": 32463, "grateful": 45659, "grateful</w>": 5730, "grati": 36402, "gratis</w>": 33638, "gratitude</w>": 12614, "grav": 20663, "grave": 16606, "grave</w>": 9981, "gravel</w>": 27054, "graves</w>": 17665, "graveyard</w>": 31176, "gravit": 26150, "gravitational</w>": 45268, "gravity": 47426, "gravity</w>": 15160, "gravy</w>": 21225, "gray": 12703, "gray</w>": 7048, "grays</w>": 46848, "grayson": 45831, "grayson</w>": 25471, "grazi": 42427, "grazie</w>": 38698, "grazing</w>": 29889, "grc</w>": 44069, "gre": 689, "gre</w>": 17878, "grease</w>": 24132, "greasy</w>": 44376, "great": 3265, "great</w>": 830, "greate": 31930, "greater": 32725, "greater</w>": 7033, "greatest": 39080, "greatest</w>": 4153, "greatly</w>": 13978, "greatness</w>": 14189, "greats</w>": 21855, "greaves</w>": 42350, "greco</w>": 39103, "gree": 9987, "gree</w>": 30774, "greece</w>": 6965, "greed</w>": 26147, "greedy</w>": 33301, "greek": 23844, "greek</w>": 6842, "greeks</w>": 35866, "green": 2762, "green</w>": 1901, "greenberg</w>": 46662, "greene</w>": 16383, "greener</w>": 31169, "greenery</w>": 42493, "greenfield</w>": 39924, "greeng": 42077, "greenhouse</w>": 20819, "greening</w>": 48673, "greenland</w>": 27345, "greenpeace</w>": 44755, "greens</w>": 10235, "greensboro</w>": 33436, "greenville</w>": 25156, "greenway</w>": 35205, "greenwich</w>": 18658, "greenwood</w>": 25782, "greer</w>": 34345, "greet": 11042, "greet</w>": 11997, "greeted</w>": 24546, "greeting</w>": 17754, "greetings</w>": 11569, "greets</w>": 25464, "greg": 6894, "greg</w>": 7943, "gregation</w>": 20131, "gregg": 39422, "gregg</w>": 22929, "gregor": 33856, "gregor</w>": 16177, "gregory</w>": 16253, "gren": 13941, "gren</w>": 20119, "grenade</w>": 33679, "grenfell</w>": 42107, "gres</w>": 39670, "gress</w>": 2752, "gret": 30041, "greta</w>": 33443, "gretchen</w>": 45516, "grette</w>": 38774, "grew</w>": 10451, "grey": 9190, "grey</w>": 5046, "greyhound</w>": 27363, "greyhounds</w>": 45718, "greys</w>": 44311, "greysanatomy</w>": 36833, "gri": 2169, "gri</w>": 18484, "grid": 29067, "grid</w>": 9882, "gridi": 41063, "gridiron</w>": 47786, "grids</w>": 46500, "grief</w>": 21058, "grier</w>": 22016, "griev": 36400, "grieving</w>": 42383, "griez": 47962, "griezmann</w>": 48396, "griff": 17855, "griff</w>": 35551, "griffi": 28676, "griffin": 46612, "griffin</w>": 13161, "griffith</w>": 24375, "griffiths</w>": 34182, "gril": 49091, 
"grill": 44083, "grill</w>": 9519, "grille</w>": 34748, "grilled</w>": 10691, "grilling</w>": 28324, "grills</w>": 39464, "grim": 20383, "grim</w>": 23635, "grime</w>": 37101, "grimes</w>": 25057, "grimm</w>": 27865, "grims": 34861, "grimsby</w>": 41513, "grin": 11033, "grin</w>": 28697, "grinch</w>": 40527, "grind": 25730, "grind</w>": 11810, "grinder</w>": 31733, "grinding</w>": 21541, "gring</w>": 40135, "grip</w>": 15521, "gripping</w>": 34567, "grips</w>": 27819, "gris</w>": 29150, "grit": 22037, "grit</w>": 22087, "grits</w>": 44307, "gritty</w>": 33704, "grizz": 14877, "grizz</w>": 44088, "grizzlies</w>": 25594, "grizzly</w>": 29676, "grl</w>": 48005, "gro": 1464, "gro</w>": 12691, "grocer": 11633, "groceries</w>": 32409, "grocery</w>": 13826, "grom</w>": 45284, "gron": 22345, "groningen</w>": 45639, "groo": 9015, "groom": 39883, "groom</w>": 22813, "grooming</w>": 25575, "groot</w>": 37708, "groove": 39484, "groove</w>": 17680, "grooves</w>": 43954, "groovy</w>": 30143, "gros": 26834, "gros</w>": 32639, "gross": 31080, "gross</w>": 11541, "grosven": 46911, "grote": 47207, "grotto</w>": 45260, "grou": 1582, "groun": 45110, "ground": 9558, "ground</w>": 2461, "groundbreaking</w>": 21006, "grounded</w>": 27799, "grounds</w>": 8454, "groundwater</w>": 39457, "group": 19045, "group</w>": 1771, "groupe</w>": 47654, "groups</w>": 6776, "grouse</w>": 36327, "grove": 31756, "grove</w>": 7463, "grover</w>": 31345, "groves</w>": 27306, "grow": 3179, "grow</w>": 4559, "grower</w>": 44925, "growers</w>": 25689, "growing": 28429, "growing</w>": 4425, "growingup": 43433, "growler</w>": 47096, "grown": 41762, "grown</w>": 7120, "grows</w>": 13352, "growth": 17925, "growth</w>": 4026, "growthhacking</w>": 25963, "grp</w>": 27321, "grt</w>": 28557, "gru": 5957, "grub</w>": 34019, "grue": 42047, "gruesome</w>": 47111, "grum": 45454, "grump": 49015, "grumpy</w>": 23610, "grun": 16203, "grunge</w>": 33745, "gry": 16140, "gry</w>": 5364, "gs": 25818, "gs</w>": 1345, "gsa</w>": 40433, "gsc</w>": 47751, "gshore</w>": 43392, "gsm</w>": 32181, "gsp</w>": 49173, "gst</w>": 22239, "gt": 16151, "gt</w>": 4725, "gta": 14826, "gta</w>": 15338, "gtaonline</w>": 27292, "gtav</w>": 27283, "gti</w>": 39954, "gto</w>": 39071, "gtr</w>": 33407, "gts</w>": 37338, "gtx</w>": 35230, "gu": 700, "gu</w>": 12916, "gua</w>": 23751, "guacam": 37477, "guacamole</w>": 40115, "guad": 22966, "guadal": 46097, "guadalu": 36994, "guadalupe</w>": 38360, "guam</w>": 37325, "guan": 44191, "guan</w>": 42406, "guang": 27019, "guangzhou</w>": 37857, "guar": 4119, "guaran": 9242, "guarantee</w>": 17421, "guaranteed</w>": 14731, "guarantees</w>": 40154, "guard": 30776, "guard</w>": 4901, "guarded</w>": 40602, "guardi": 12008, "guardia</w>": 43628, "guardian": 23713, "guardian</w>": 9498, "guardians</w>": 21479, "guarding</w>": 24966, "guardiola</w>": 32100, "guards</w>": 12810, "guatem": 19423, "guatemala</w>": 21670, "guay": 48591, "guay</w>": 24247, "gubernat": 41400, "gubernatorial</w>": 41618, "gucci</w>": 16779, "gud": 48061, "gud</w>": 22378, "gue": 2030, "gue</w>": 2917, "gued</w>": 38893, "guel": 23146, "guelph</w>": 27660, "guer": 10391, "guern": 29277, "guernsey</w>": 33982, "guerra</w>": 38215, "guerrero</w>": 31967, "guerrilla</w>": 36715, "gues": 39971, "gues</w>": 12601, "guess": 35506, "guess</w>": 3135, "guessed</w>": 28005, "guesses</w>": 30623, "guessing</w>": 21891, "guest": 27349, "guest</w>": 3781, "guests</w>": 6212, "guet</w>": 36797, "guetta</w>": 45904, "guez</w>": 12313, "gug": 31358, "guggen": 35086, 
"guggenheim</w>": 37135, "gui": 2587, "gui</w>": 25746, "guid": 11437, "guidance</w>": 12508, "guide": 21845, "guide</w>": 3555, "guided</w>": 13194, "guidelines</w>": 16591, "guides</w>": 14375, "guiding</w>": 22759, "guido</w>": 41818, "guil": 5008, "guild": 19755, "guild</w>": 16597, "guildford</w>": 34450, "guildhall</w>": 47224, "guillau": 41123, "guillaume</w>": 45394, "guiller": 33660, "guillermo</w>": 39524, "guilt</w>": 26354, "guilty</w>": 9761, "guin": 13284, "guin</w>": 47863, "guine": 13759, "guinea</w>": 18537, "guinness</w>": 16648, "guire</w>": 18209, "guise</w>": 42024, "guit": 3759, "guitar": 21746, "guitar</w>": 5084, "guitarist</w>": 13035, "guitars</w>": 15023, "guj": 34935, "gujar": 12698, "gujarat</w>": 14714, "guk</w>": 20280, "gul": 5530, "gul</w>": 21350, "gula</w>": 27426, "gular</w>": 34969, "gulf": 22101, "gulf</w>": 11279, "gull": 48764, "gull</w>": 28778, "gulls</w>": 37501, "gully</w>": 46112, "gum": 22041, "gum</w>": 11235, "gumb": 40147, "gumbo</w>": 47126, "gummy</w>": 34276, "gums</w>": 46609, "gun": 2748, "gun</w>": 3496, "guna</w>": 43333, "gundam</w>": 26087, "gundy</w>": 21162, "gunman</w>": 32743, "gunmen</w>": 44738, "gunn</w>": 27473, "gunna</w>": 24002, "gunnar</w>": 45301, "gunner</w>": 35285, "gunners</w>": 37788, "guns</w>": 7591, "gunsense</w>": 44781, "gunshot</w>": 49250, "gunsn": 49028, "gup": 38632, "gup</w>": 47335, "gupta</w>": 15905, "gur": 3218, "gur</w>": 30224, "gura</w>": 46836, "gurgaon</w>": 33240, "guri</w>": 43888, "gurl</w>": 25445, "gurmee": 35482, "gurmeetramrahim</w>": 36549, "guru": 18629, "guru</w>": 10800, "gurudev</w>": 48647, "gus</w>": 8018, "gust</w>": 24629, "gusta": 23024, "gusta</w>": 44196, "gustav": 32062, "gustav</w>": 37921, "gustave</w>": 43170, "gustavo</w>": 45943, "gusto</w>": 37937, "gusts</w>": 20896, "gusty</w>": 27589, "gut": 24780, "gut</w>": 13486, "guter": 44963, "guterres</w>": 48738, "guth": 31696, "guthrie</w>": 33164, "gutier": 32773, "gutierrez</w>": 33739, "guts</w>": 25983, "gutted</w>": 26524, "gutter</w>": 40537, "guwa": 43063, "guwahati</w>": 45045, "guy": 10008, "guy</w>": 2149, "guyana</w>": 45215, "guyen</w>": 28031, "guys": 43588, "guys</w>": 1791, "guyz</w>": 48170, "guzman</w>": 37960, "gv": 15462, "gv</w>": 17336, "gw": 7172, "gw</w>": 15717, "gwen": 32165, "gwen</w>": 24182, "gwin": 43005, "gwy": 32226, "gwyne": 36923, "gx</w>": 40227, "gy": 2168, "gy</w>": 1164, "gya</w>": 43214, "gyan</w>": 43814, "gye": 21728, "gyllen": 49348, "gym": 9902, "gym</w>": 5222, "gymna": 13517, "gymnasium</w>": 42847, "gymnast</w>": 42658, "gymnastics</w>": 20116, "gyn</w>": 39603, "gyne": 45836, "gyp": 40053, "gypsy</w>": 22354, "gypt</w>": 41921, "gz": 45937, "gz</w>": 35841, "gö": 40778, "gü": 31907, "h": 71, "h</w>": 327, "ha": 560, "ha</w>": 1429, "haa</w>": 26814, "haal</w>": 35869, "haan</w>": 36284, "haar": 45247, "haar</w>": 35859, "haas</w>": 27443, "haasan</w>": 26601, "hab": 20573, "hab</w>": 20002, "haban": 46225, "haber": 44737, "habit": 8491, "habit</w>": 17215, "habitat</w>": 11747, "habitats</w>": 35344, "habits</w>": 14540, "habs</w>": 27489, "hac": 20343, "hace</w>": 43623, "haci": 40674, "hack": 6610, "hack</w>": 11182, "hackathon</w>": 25182, "hacked</w>": 19575, "hacker</w>": 22376, "hackers</w>": 21498, "hacking</w>": 12939, "hackney": 48811, "hackney</w>": 24928, "hacks</w>": 19965, "had": 10660, "had</w>": 1100, "hadi</w>": 39058, "hadid</w>": 26415, "hadith</w>": 46907, "hadley</w>": 44995, "hadn</w>": 21480, "hadoop</w>": 43868, "hae": 30723, "hae</w>": 27193, "hafi": 39914, 
"hag": 26855, "hag</w>": 43207, "hagan</w>": 47489, "hagen</w>": 14664, "hager": 48773, "hagg": 26324, "hague</w>": 28988, "hah": 18108, "hah</w>": 13680, "haha": 1913, "haha</w>": 3060, "hahah": 27253, "hahah</w>": 15441, "hahaha</w>": 4722, "hahahah": 37513, "hahahah</w>": 20096, "hahahaha": 8058, "hahahaha</w>": 9501, "hahahahah</w>": 33334, "hahahahaha</w>": 16347, "hahahahahaha</w>": 26487, "hahahahahahaha</w>": 43653, "hahahahahahahaha": 36126, "hahahha</w>": 49205, "hahn</w>": 35596, "hai": 8734, "hai</w>": 5234, "haider</w>": 42200, "haiku</w>": 19542, "hail": 15272, "hail</w>": 8634, "hailed</w>": 44604, "hailey</w>": 27703, "hailing</w>": 47288, "hails</w>": 32571, "hailstate</w>": 35063, "hain</w>": 23861, "hair": 4658, "hair</w>": 2225, "haircare</w>": 43682, "haircut</w>": 14711, "hairdresser</w>": 47468, "haired</w>": 27202, "hairs</w>": 27951, "hairstyle</w>": 22324, "hairstyles</w>": 40627, "hairy</w>": 26513, "haiti</w>": 17368, "haitian</w>": 37577, "haj": 27885, "haj</w>": 43191, "haji</w>": 41889, "hajj</w>": 35576, "hak": 25142, "hak</w>": 40671, "haka</w>": 44011, "hake": 41663, "hal": 1296, "hal</w>": 8708, "hala</w>": 25918, "halal</w>": 34216, "halam": 29061, "halamadrid</w>": 31132, "halder</w>": 32201, "hale": 37038, "hale</w>": 14701, "halen</w>": 39204, "halep</w>": 49017, "haley": 37330, "haley</w>": 16839, "half": 7453, "half</w>": 2349, "halftime</w>": 13742, "halfway</w>": 16736, "hali": 9860, "hali</w>": 43030, "halibut</w>": 49030, "halifax</w>": 13411, "hall": 6850, "hall</w>": 2140, "halla</w>": 29569, "halle": 27763, "halle</w>": 32239, "hallelujah</w>": 36993, "halli": 32665, "hallmark": 31040, "hallmark</w>": 32053, "hallmarkchannel</w>": 36840, "hallo": 3463, "halloffame</w>": 48578, "halloween": 28537, "halloween</w>": 3739, "halls</w>": 18052, "hallucin": 35385, "hallway</w>": 26845, "halo": 33331, "halo</w>": 11918, "halsey</w>": 34256, "halt</w>": 25640, "halter</w>": 47194, "halton</w>": 45445, "ham": 1522, "ham</w>": 1714, "hama</w>": 17944, "hamas</w>": 14818, "hamburg</w>": 18409, "hamburger</w>": 33928, "hamid</w>": 32377, "hamil": 6725, "hamill": 45784, "hamill</w>": 48729, "hamillhimself</w>": 47324, "hamilton": 22448, "hamilton</w>": 7684, "hamlet</w>": 27722, "hamlin</w>": 49326, "hamm</w>": 46110, "hammer": 15331, "hammer</w>": 9401, "hammered</w>": 37251, "hammers</w>": 35649, "hammersmith</w>": 42127, "hammock</w>": 33682, "hammond</w>": 21761, "hamont</w>": 18518, "hamp": 6665, "hamper</w>": 27692, "hampshire</w>": 16006, "hampstead</w>": 37340, "hampton": 36582, "hampton</w>": 12285, "hamptons</w>": 42415, "hamr</w>": 47979, "hamradio</w>": 36712, "hams</w>": 25619, "hamster</w>": 33313, "hamstring</w>": 39990, "hamza</w>": 45762, "han": 1545, "han</w>": 3565, "hana</w>": 16801, "hand": 1722, "hand</w>": 2463, "handbag</w>": 22654, "handbags</w>": 35667, "handball</w>": 27988, "handbook</w>": 25147, "handcrafted</w>": 22185, "handed</w>": 10881, "handedly</w>": 48656, "handel</w>": 40072, "handful</w>": 23725, "handheld</w>": 26812, "handic": 17812, "handicap</w>": 27063, "handicapp": 42349, "handing</w>": 19196, "handle": 43681, "handle</w>": 7245, "handled</w>": 26824, "handler</w>": 29097, "handles</w>": 22124, "handling</w>": 14071, "handmade": 18054, "handmade</w>": 6737, "handmadehour</w>": 25724, "handover</w>": 46922, "hands</w>": 3500, "handshake</w>": 38418, "handsome</w>": 7438, "handwriting</w>": 29986, "handwritten</w>": 35192, "handy</w>": 13479, "hane</w>": 28411, "hang": 3351, "hang</w>": 5592, "hangar</w>": 33439, 
"hanged</w>": 40807, "hanger</w>": 28905, "hangin</w>": 22670, "hanging</w>": 4850, "hangout</w>": 17572, "hangover</w>": 20755, "hangs</w>": 21785, "hani": 39944, "hani</w>": 18374, "hank": 35993, "hank</w>": 17655, "hanks</w>": 29943, "hanley</w>": 47284, "hann": 5584, "hanna</w>": 10075, "hannah": 18622, "hannah</w>": 9142, "hannel</w>": 43477, "hanni": 19493, "hannibal</w>": 25149, "hannity</w>": 24569, "hannover</w>": 39976, "hanoi</w>": 36134, "hanover</w>": 33246, "hans": 35172, "hans</w>": 16628, "hansen</w>": 19729, "hanson</w>": 24602, "hant": 40641, "hanuk": 32774, "hanukkah</w>": 34247, "hanuman</w>": 46975, "hao</w>": 27184, "hap": 44981, "hap</w>": 47988, "happ": 784, "happen": 21486, "happen</w>": 4506, "happened</w>": 4402, "happening</w>": 4284, "happeningnow</w>": 43107, "happenings</w>": 41998, "happens</w>": 4988, "happier</w>": 14118, "happiest</w>": 13811, "happily</w>": 17316, "happiness</w>": 5096, "happy": 2952, "happy</w>": 900, "happybirthday": 9651, "happybirthday</w>": 12207, "happydays</w>": 25106, "happye": 33922, "happyeaster</w>": 38745, "happyfathersday</w>": 43534, "happyfriday</w>": 33340, "happyhalloween</w>": 28750, "happyholidays</w>": 32186, "happyhour</w>": 32036, "happymonday</w>": 47364, "happymothersday</w>": 42425, "happynewyear</w>": 18655, "happythanksgiving</w>": 40593, "happyvalentinesday</w>": 42403, "haps</w>": 9114, "haq</w>": 32445, "har": 915, "har</w>": 5888, "hara</w>": 10367, "haram": 35732, "haram</w>": 22950, "haran</w>": 27921, "harare</w>": 43562, "haras": 26644, "harass": 16481, "harassed</w>": 43067, "harassment</w>": 16641, "harat</w>": 28984, "harb": 5856, "harbaugh</w>": 45220, "harbor": 40686, "harbor</w>": 10202, "harbour": 35430, "harbour</w>": 10011, "harcourt</w>": 48093, "hard": 3312, "hard</w>": 1626, "hardcover</w>": 31123, "harden</w>": 27350, "harder</w>": 12274, "hardest</w>": 15258, "hardin</w>": 43802, "harding</w>": 24382, "hardly</w>": 17363, "hardro": 28126, "hardrock": 48365, "hardrock</w>": 40739, "hards</w>": 44048, "hardship</w>": 45085, "hardt</w>": 17922, "hardware</w>": 11957, "hardwell</w>": 45572, "hardwick</w>": 46864, "hardwood</w>": 28167, "hardwork": 42554, "hardwork</w>": 27404, "hardworking</w>": 28095, "hardworkpaysoff</w>": 49193, "hardy": 48179, "hardy</w>": 14113, "hare": 27903, "hare</w>": 18464, "harga</w>": 39738, "hari": 25472, "hari</w>": 8981, "harlan</w>": 49133, "harle": 29096, "harlem</w>": 17771, "harley": 24702, "harley</w>": 13632, "harleydavidson</w>": 39183, "harlow</w>": 34113, "harm": 16656, "harm</w>": 14452, "harman</w>": 42434, "harmed</w>": 39637, "harmful</w>": 21725, "harmless</w>": 44369, "harmon": 10828, "harmon</w>": 28729, "harmony</w>": 10785, "harms</w>": 46703, "harne": 43323, "harness</w>": 23205, "harold</w>": 16917, "harp</w>": 27339, "harper": 31288, "harper</w>": 12634, "harri": 6639, "harrier</w>": 37372, "harriet</w>": 27154, "harrington</w>": 34340, "harris": 25356, "harris</w>": 6925, "harrisburg</w>": 40590, "harrison": 34389, "harrison</w>": 10540, "harro": 18939, "harrogate</w>": 30842, "harrow</w>": 38807, "harry": 11094, "harry</w>": 3600, "harrypotter</w>": 23375, "harsh": 30596, "harsh</w>": 16944, "hart": 9335, "hart</w>": 7752, "hartford</w>": 23434, "harth</w>": 35619, "hartle": 47482, "hartley</w>": 31268, "hartman</w>": 43294, "haru</w>": 35099, "harvard": 28118, "harvard</w>": 12848, "harve": 6405, "harvest": 44495, "harvest</w>": 8971, "harvested</w>": 35899, "harvesting</w>": 26674, "harvey": 33289, "harvey</w>": 9586, "harvick</w>": 
46983, "haryana</w>": 27661, "has": 13855, "has</w>": 791, "hasan</w>": 30049, "hasbro</w>": 37405, "hash": 6338, "hash</w>": 19199, "hashi</w>": 41831, "hashmi</w>": 35852, "hashtag": 34015, "hashtag</w>": 9238, "hashtags</w>": 23514, "haskell</w>": 48550, "hasn</w>": 9143, "hass": 9298, "hassan</w>": 15829, "hassee</w>": 37117, "hassel": 32204, "hassle</w>": 35762, "hast": 18146, "hasta</w>": 36623, "hastings</w>": 22035, "hat": 3447, "hat</w>": 3801, "hatch": 24202, "hatch</w>": 17809, "hatchback</w>": 42348, "hatched</w>": 42158, "hate": 23546, "hate</w>": 3753, "hated</w>": 21298, "hateful</w>": 36418, "hater</w>": 36917, "haters</w>": 14027, "hates</w>": 14957, "hatfield</w>": 38448, "hath": 27894, "hath</w>": 34416, "hathaway</w>": 31801, "hati</w>": 26045, "hating</w>": 25668, "hatred</w>": 19046, "hats</w>": 9812, "hatt": 8747, "hatton</w>": 44861, "hau": 5152, "hauer</w>": 48751, "haul": 23743, "haul</w>": 12332, "hauled</w>": 46620, "hauling</w>": 43132, "haun": 9676, "haunt</w>": 31039, "haunted</w>": 14944, "haunting</w>": 24034, "haunts</w>": 48035, "haus": 41755, "haus</w>": 16478, "hausen</w>": 33338, "hauser</w>": 46586, "haute</w>": 28854, "hav": 13443, "hav</w>": 20447, "havan</w>": 36304, "havana</w>": 23357, "havas": 46261, "have": 18053, "have</w>": 720, "haven": 33074, "haven</w>": 3871, "havent</w>": 29130, "haver": 27876, "haves</w>": 49088, "havin</w>": 31937, "having</w>": 1977, "havoc</w>": 24447, "haw": 2788, "haw</w>": 26954, "hawa": 6067, "hawa</w>": 46278, "hawai": 15800, "hawaii": 32413, "hawaii</w>": 8265, "hawaiian</w>": 17734, "hawan</w>": 27765, "hawk": 14704, "hawk</w>": 8218, "hawke</w>": 38178, "hawker</w>": 39051, "hawkeye</w>": 38666, "hawkeyes</w>": 34266, "hawking</w>": 33437, "hawkins</w>": 19740, "hawks": 44806, "hawks</w>": 5841, "hawthorn</w>": 45372, "hawthorne</w>": 36730, "hay": 4871, "hay</w>": 11367, "haya</w>": 41325, "hayat</w>": 49360, "hayden</w>": 19806, "haydn</w>": 48207, "haye": 36583, "hayes</w>": 13555, "hayley": 39986, "hayley</w>": 22204, "haynes</w>": 30496, "hays</w>": 41524, "hayward</w>": 29400, "haz": 5040, "haz</w>": 39921, "hazard": 26174, "hazard</w>": 15178, "hazardous</w>": 27102, "hazards</w>": 30639, "haze</w>": 22785, "hazel": 19838, "hazel</w>": 21882, "hazelnut</w>": 35816, "hazi</w>": 22740, "hazmat</w>": 48887, "hazrat</w>": 45775, "hazy</w>": 32655, "hb": 6854, "hb</w>": 12576, "hbcu</w>": 40008, "hbd": 25277, "hbd</w>": 13594, "hbo</w>": 15252, "hc": 15831, "hc</w>": 7821, "hcs</w>": 46850, "hd": 11601, "hd</w>": 4414, "hdd</w>": 40508, "hdmi</w>": 33302, "hdr</w>": 28065, "he": 651, "he</w>": 797, "hea": 27150, "hea</w>": 32790, "head": 1603, "head</w>": 1375, "headache</w>": 23849, "headaches</w>": 38025, "headband</w>": 28556, "headed</w>": 6153, "header</w>": 11077, "heading</w>": 4409, "headless</w>": 45219, "headlights</w>": 42422, "headline</w>": 10891, "headliner</w>": 38880, "headlines</w>": 14706, "headlining</w>": 26971, "headphone</w>": 37524, "headphones</w>": 14906, "headquarters</w>": 13041, "heads</w>": 5174, "headset</w>": 23883, "headshot</w>": 34890, "heal": 1231, "heal</w>": 13833, "healed</w>": 31456, "healer</w>": 38328, "healey</w>": 38985, "healing</w>": 9295, "heals</w>": 32384, "health": 2145, "health</w>": 1728, "healthand": 43704, "healthcare": 42500, "healthcare</w>": 6023, "healthier</w>": 18242, "healthtech</w>": 42694, "healthy": 10330, "healthy</w>": 3782, "healthye": 31532, "healthyeating</w>": 33761, "healthyfood</w>": 39996, "healthylifestyle</w>": 46254, 
"healthyliving</w>": 27293, "healy</w>": 34299, "heap</w>": 34781, "heaps</w>": 44446, "hear": 2749, "hear</w>": 2584, "heard</w>": 4063, "hearing": 46353, "hearing</w>": 5541, "hearings</w>": 33175, "hearn</w>": 36613, "hears</w>": 25395, "heart": 4975, "heart</w>": 1936, "heartbeat</w>": 29154, "heartbreak</w>": 29281, "heartbreaking</w>": 21322, "heartbroken</w>": 35383, "hearted</w>": 21679, "heartfelt</w>": 22904, "hearth": 31563, "hearthstone</w>": 34054, "hearti": 29345, "hearties</w>": 44572, "heartland</w>": 31923, "heartless</w>": 47022, "heartnews</w>": 40426, "hearts</w>": 5516, "heartw": 30002, "heartwarming</w>": 34080, "hearty</w>": 26994, "heat": 12175, "heat</w>": 4403, "heated</w>": 17057, "heater</w>": 23246, "heath": 12794, "heath</w>": 11719, "heather": 20230, "heather</w>": 12470, "heathrow</w>": 24171, "heating</w>": 12478, "heaton</w>": 34557, "heats</w>": 36106, "heatwave</w>": 25726, "heav": 2409, "heaven": 15520, "heaven</w>": 5545, "heavenly</w>": 19117, "heavens</w>": 26026, "heavier</w>": 31253, "heaviest</w>": 33268, "heavily</w>": 14123, "heavy": 12048, "heavy</w>": 4200, "heavymetal</w>": 39804, "heavyweight</w>": 17448, "heb": 24700, "heb</w>": 34515, "hebdo</w>": 41817, "hebrew</w>": 27298, "hebrides</w>": 45121, "hebron</w>": 45725, "hec": 18932, "heck": 22985, "heck</w>": 14427, "hectares</w>": 44162, "hectic</w>": 37245, "hector</w>": 25852, "hed": 18271, "hedge": 16229, "hedge</w>": 20294, "hedgehog</w>": 21940, "hedges</w>": 41345, "hee": 18364, "hee</w>": 15773, "heechul</w>": 42487, "heed</w>": 15118, "heel": 33646, "heel</w>": 16861, "heels</w>": 10909, "heem</w>": 30061, "heer</w>": 40473, "hef": 29473, "heff": 48756, "hefty</w>": 48584, "heg": 41995, "heh</w>": 25834, "hehe": 48723, "hehe</w>": 10658, "hehehe</w>": 24138, "hei": 6101, "hei</w>": 29051, "heidel": 42927, "heidelberg</w>": 48445, "heidi": 44860, "heidi</w>": 23867, "heifer</w>": 48219, "heigh": 43883, "height</w>": 10788, "heights</w>": 8418, "heim": 10931, "heim</w>": 9768, "heimer</w>": 39517, "hein": 15487, "hein</w>": 43206, "heine": 28742, "heineken</w>": 36874, "heinrich</w>": 47877, "heinz</w>": 32359, "heir": 27083, "heir</w>": 34007, "heirloom</w>": 34232, "heirs</w>": 43834, "heis": 21849, "heisman</w>": 34537, "heist</w>": 31035, "heit</w>": 37255, "hel": 919, "hel</w>": 11579, "hela</w>": 48212, "held</w>": 4042, "hele</w>": 46129, "helen": 17576, "helen</w>": 11291, "helena</w>": 23109, "helene</w>": 41591, "helens</w>": 45940, "heli": 33874, "heli</w>": 40183, "helicop": 10035, "helicopter</w>": 11956, "helicopters</w>": 26922, "helium</w>": 46505, "helix</w>": 35247, "hell": 8410, "hell</w>": 4141, "hella</w>": 19800, "hellboy</w>": 48428, "helle": 48600, "helle</w>": 46968, "hellenic</w>": 42544, "heller</w>": 44464, "hello": 12887, "hello</w>": 3306, "hells</w>": 47989, "helly</w>": 48690, "helm": 47970, "helm</w>": 19520, "helmet</w>": 11122, "helmets</w>": 21843, "help": 8641, "help</w>": 1318, "helped</w>": 4845, "helper</w>": 29321, "helpers</w>": 36316, "helpful</w>": 12695, "helping</w>": 3875, "helpless</w>": 47638, "helpline</w>": 43101, "helps</w>": 5144, "helsin": 17842, "helsinki</w>": 19626, "hem": 20270, "hem</w>": 11148, "hemi": 14256, "hemi</w>": 46856, "heming": 30819, "hemingway</w>": 33470, "hemisphere</w>": 32767, "hemmings</w>": 34882, "hemo": 43788, "hemp": 28225, "hemp</w>": 18467, "hems": 32451, "hemsworth</w>": 39428, "hen": 2385, "hen</w>": 8047, "hence</w>": 23640, "hend": 11560, "hender": 49248, "henderson</w>": 14348, "hendrick</w>": 
45296, "hendricks</w>": 37588, "hendrix</w>": 23605, "henge</w>": 33104, "henley</w>": 27853, "henna</w>": 39455, "hennessy</w>": 42667, "henri": 19431, "henri</w>": 21610, "henrik</w>": 35772, "henry": 16018, "henry</w>": 5508, "hens</w>": 31742, "henson</w>": 32935, "hep": 17724, "hep</w>": 48791, "hepat": 23767, "hepatitis</w>": 32169, "hepburn</w>": 26348, "her": 1223, "her</w>": 899, "hera</w>": 38724, "heral": 37809, "herald": 27625, "herald</w>": 12851, "herb": 26116, "herb</w>": 15302, "herbal</w>": 21868, "herbali": 44087, "herbalife</w>": 48364, "herbert</w>": 19935, "herbs</w>": 17320, "hercules</w>": 26539, "herd": 36142, "herd</w>": 18589, "here": 9134, "here</w>": 763, "hered": 47976, "hereford</w>": 35543, "heres</w>": 13566, "hereto": 47673, "heri</w>": 31392, "herit": 4720, "heritag": 38273, "heritage": 20962, "heritage</w>": 5455, "herman": 31890, "herman</w>": 21568, "hermann</w>": 40942, "hermes</w>": 34563, "hermi": 35265, "hermione</w>": 45502, "hermit</w>": 43953, "hermitage</w>": 47706, "hermo": 40967, "hermosa</w>": 42531, "hern": 30571, "hern</w>": 43576, "hernandez</w>": 17707, "hero": 7338, "hero</w>": 3756, "heroes": 38010, "heroes</w>": 5506, "heroic</w>": 24255, "heroin</w>": 23841, "heroine</w>": 27420, "heron</w>": 22593, "heros</w>": 37642, "herr</w>": 38537, "herrera</w>": 27755, "herring</w>": 30211, "hers</w>": 25359, "herself</w>": 9207, "hersh": 20379, "hershey</w>": 29734, "hert": 26744, "hertfordshire</w>": 41070, "herts</w>": 35784, "herty</w>": 23454, "hertz</w>": 49383, "hes": 30553, "hes</w>": 12784, "hesit": 23933, "hesitate</w>": 34967, "hess</w>": 41888, "hester</w>": 31105, "het": 37527, "het</w>": 19678, "hetero": 26405, "heu": 20105, "heughan</w>": 32298, "hew": 48141, "hew</w>": 43051, "hewitt</w>": 28871, "hex": 16255, "hex</w>": 31241, "hey": 10759, "hey</w>": 2189, "hez": 34591, "hezbollah</w>": 37636, "hf": 26606, "hf</w>": 20603, "hfx": 47297, "hg": 23986, "hg</w>": 26237, "hgtv</w>": 47657, "hh": 3280, "hh</w>": 5180, "hhh</w>": 8281, "hhhh": 19391, "hhhh</w>": 13121, "hhhhh</w>": 24246, "hhhhhh</w>": 37278, "hhs</w>": 27006, "hi": 677, "hi</w>": 1883, "hia</w>": 20672, "hiatus</w>": 27823, "hib": 15922, "hiber": 38799, "hibis": 36226, "hibiscus</w>": 36460, "hibition</w>": 24658, "hibs</w>": 42814, "hic": 3549, "hic</w>": 38079, "hick": 14813, "hickman</w>": 49148, "hickory</w>": 29905, "hicks</w>": 23429, "hid": 15552, "hid</w>": 14451, "hidalgo</w>": 47464, "hidden": 28305, "hidden</w>": 7029, "hiddleston</w>": 31444, "hide": 17725, "hide</w>": 9379, "hideous</w>": 46588, "hides</w>": 30800, "hiding</w>": 11371, "hie</w>": 15763, "hier": 23433, "hier</w>": 29913, "hierarchy</w>": 44442, "hifi</w>": 38168, "hig": 38108, "higgins</w>": 21783, "high": 1487, "high</w>": 1400, "higher</w>": 5321, "highered</w>": 27072, "highest</w>": 5317, "highland": 32244, "highland</w>": 16062, "highlander</w>": 46251, "highlanders</w>": 40445, "highlands</w>": 16883, "highlight</w>": 8264, "highlighted</w>": 22252, "highlighter</w>": 45460, "highlighting</w>": 17344, "highlights</w>": 6173, "highly</w>": 5302, "highness</w>": 38694, "highs</w>": 15144, "highschool</w>": 23102, "highway": 45344, "highway</w>": 7620, "highways</w>": 28007, "higu": 39115, "hihi</w>": 36240, "hii</w>": 42315, "hijab</w>": 31407, "hika</w>": 41356, "hikari</w>": 44624, "hike</w>": 9404, "hiked</w>": 36471, "hiker</w>": 40947, "hikers</w>": 46090, "hikes</w>": 27076, "hiking</w>": 9118, "hiko</w>": 48708, "hil": 3508, "hil</w>": 17927, "hila</w>": 38837, "hilar": 37337, 
"hilari": 7784, "hilarious</w>": 8358, "hilariously</w>": 43476, "hilary": 45898, "hilary</w>": 25415, "hilde": 45382, "hill": 3671, "hill</w>": 2682, "hillary": 13257, "hillary</w>": 7074, "hillaryclinton</w>": 15357, "hilli": 32513, "hills": 24178, "hills</w>": 5289, "hillsborough</w>": 32157, "hillside</w>": 37194, "hilltop</w>": 45858, "hilly</w>": 32483, "hilton": 33621, "hilton</w>": 14012, "him": 4128, "him</w>": 1269, "himach": 29132, "himachal</w>": 35461, "himalay": 17552, "himalayan</w>": 30318, "himalayas</w>": 32872, "hime</w>": 45892, "himself</w>": 4530, "himss</w>": 41730, "hin": 1676, "hin</w>": 37930, "hina</w>": 40571, "hinakhan</w>": 45518, "hinch": 49320, "hind": 34460, "hind</w>": 23293, "hindi</w>": 14967, "hinds</w>": 47859, "hindu": 17587, "hindu</w>": 12053, "hinduism</w>": 40592, "hindus</w>": 25701, "hindustan</w>": 46553, "hines</w>": 37462, "hing": 37968, "hini</w>": 33564, "hino</w>": 45343, "hint</w>": 11868, "hinton</w>": 47165, "hints</w>": 20594, "hio</w>": 32897, "hip": 11725, "hip</w>": 6584, "hipho": 8819, "hiphop": 26598, "hiphop</w>": 10914, "hipp": 13607, "hippie</w>": 28637, "hippo": 28398, "hippo</w>": 36729, "hips</w>": 30191, "hipstamatic</w>": 31002, "hipster</w>": 19987, "hipsters</w>": 48265, "hir": 4959, "hir</w>": 14728, "hira</w>": 42577, "hire": 32356, "hire</w>": 8243, "hired</w>": 17602, "hires</w>": 24133, "hiring</w>": 7835, "hiro": 17396, "hiro</w>": 20588, "hiroshima</w>": 33867, "hirsch</w>": 46967, "his": 15211, "his</w>": 787, "hism</w>": 23502, "hispan": 16843, "hispanic</w>": 22676, "hist": 21710, "hist</w>": 13779, "histo": 33479, "histor": 2993, "historia</w>": 46010, "historian</w>": 20697, "historians</w>": 35200, "historic": 30195, "historic</w>": 5726, "historical": 34154, "historical</w>": 8039, "historically</w>": 30445, "histories</w>": 34736, "history": 11142, "history</w>": 1695, "historymonth</w>": 19356, "historyof": 35905, "hit": 5453, "hit</w>": 2341, "hitch": 22937, "hitch</w>": 36203, "hitler</w>": 16518, "hitman</w>": 33290, "hits</w>": 4712, "hitter</w>": 23538, "hitters</w>": 39724, "hitting</w>": 7957, "hiv": 44410, "hiv</w>": 11018, "hive": 38162, "hive</w>": 18521, "hiya</w>": 42393, "hk": 22648, "hk</w>": 12307, "hl": 8297, "hl</w>": 5956, "hle</w>": 32389, "hler</w>": 35418, "hm": 17913, "hm</w>": 7631, "hmm</w>": 13725, "hmmm</w>": 17032, "hmmmm</w>": 34598, "hms</w>": 14625, "hmu</w>": 21630, "hmv</w>": 49288, "hn": 22905, "hn</w>": 7478, "hns</w>": 48412, "ho": 606, "ho</w>": 2971, "hoa</w>": 37517, "hoar": 31628, "hoax</w>": 33438, "hob": 18212, "hobart</w>": 31646, "hobb": 16175, "hobbies</w>": 36370, "hobbit</w>": 23207, "hobbs</w>": 34343, "hobby": 41120, "hobby</w>": 17557, "hobo": 34613, "hobo</w>": 41334, "hoboken</w>": 41568, "hoc</w>": 35880, "hoch": 43772, "hock": 34914, "hock</w>": 46574, "hockey": 16499, "hockey</w>": 4111, "hoco</w>": 34771, "hod": 31062, "hodg": 23660, "hodge</w>": 40585, "hodges</w>": 35061, "hodgson</w>": 37044, "hoe": 32502, "hoe</w>": 11262, "hoek</w>": 40073, "hoes</w>": 21164, "hof": 20186, "hof</w>": 12789, "hofer</w>": 38654, "hoff": 32860, "hoff</w>": 22751, "hofficial</w>": 41949, "hoffman</w>": 22026, "hog": 12075, "hog</w>": 13255, "hogan</w>": 19757, "hogg</w>": 42005, "hogs</w>": 23242, "hogwarts</w>": 29168, "hoh</w>": 43947, "hoi": 39295, "hok": 26942, "hok</w>": 47167, "hokies</w>": 35168, "hokkaido</w>": 49145, "hol": 1187, "hol</w>": 7349, "hola</w>": 28724, "hold": 36496, "hold</w>": 3254, "holden</w>": 21869, "holder</w>": 7862, "holders</w>": 
10074, "holding</w>": 5050, "holdings</w>": 24832, "holds</w>": 7286, "hole": 47242, "hole</w>": 5341, "holes</w>": 11266, "holi": 2093, "holi</w>": 21926, "holic</w>": 16348, "holics</w>": 29782, "holiday": 13168, "holiday</w>": 2878, "holidays</w>": 5372, "holiness</w>": 37259, "holistic</w>": 26300, "holl": 27699, "holla</w>": 26500, "holland": 31608, "holland</w>": 9978, "hollande</w>": 47690, "holler</w>": 49047, "holli": 24019, "holliday</w>": 41624, "hollow": 41221, "hollow</w>": 16691, "holloway</w>": 29435, "holly": 12731, "holly</w>": 11923, "hollyo": 41525, "hollyoaks</w>": 43352, "hollywood": 24655, "hollywood</w>": 5518, "holm": 34758, "holm</w>": 12739, "holme</w>": 46149, "holmes</w>": 12756, "holo": 10317, "holocau": 14688, "holocaust</w>": 16476, "hols</w>": 33344, "holt</w>": 18868, "holtz</w>": 44743, "holy": 13910, "holy</w>": 4874, "hom": 906, "hom</w>": 47397, "homa</w>": 9557, "homage</w>": 17746, "home": 2143, "home</w>": 1137, "homebrew</w>": 35046, "homec": 33869, "homecoming</w>": 9008, "homedecor</w>": 15695, "homedepot</w>": 38707, "homegrown</w>": 32554, "homeitems</w>": 42972, "homeland</w>": 21633, "homeless": 18403, "homeless</w>": 9661, "homelessness</w>": 19851, "homemade</w>": 7889, "homeof": 48856, "homeowner</w>": 37267, "homeowners</w>": 29882, "homepage</w>": 29828, "homer": 29307, "homer</w>": 16931, "homers</w>": 38333, "homes": 19480, "homes</w>": 5416, "homeschool</w>": 40994, "homestead</w>": 32609, "homeswee": 46298, "hometown</w>": 12238, "homework</w>": 12495, "homicide</w>": 21520, "homie</w>": 12540, "homies</w>": 18893, "homme</w>": 26193, "homo": 18129, "homo</w>": 30504, "homophobia</w>": 37875, "homophobic</w>": 40975, "homosexual</w>": 44288, "homosexuality</w>": 46720, "homs</w>": 45413, "hon": 1279, "hon</w>": 10296, "honda</w>": 8553, "honduras</w>": 29715, "hone</w>": 38640, "honest": 7814, "honest</w>": 9602, "honestly</w>": 9155, "honesty</w>": 24939, "honey": 9843, "honey</w>": 6406, "honeycomb</w>": 48583, "honeymoon</w>": 22527, "hong": 12144, "hong</w>": 8598, "hongkong</w>": 16659, "honi</w>": 17918, "honolulu</w>": 28096, "honor": 9206, "honor</w>": 3402, "honorable</w>": 19498, "honorary</w>": 15675, "honore": 25868, "honored</w>": 5494, "honoree</w>": 38993, "honorees</w>": 43012, "honoring</w>": 10771, "honors</w>": 10248, "honour</w>": 8240, "honourable</w>": 29855, "honoured</w>": 11945, "honouring</w>": 37754, "honours</w>": 22558, "hoo": 2300, "hoo</w>": 7920, "hood": 18681, "hood</w>": 3222, "hooded</w>": 33631, "hoodie</w>": 13444, "hoodies</w>": 25974, "hoods</w>": 16664, "hoof</w>": 44555, "hook": 30488, "hook</w>": 10395, "hookah</w>": 34214, "hooked</w>": 18138, "hookem</w>": 31465, "hooker</w>": 37891, "hooking</w>": 35240, "hooks</w>": 25068, "hooligans</w>": 48176, "hoon</w>": 21368, "hooo</w>": 44538, "hoop": 31516, "hoop</w>": 19573, "hooper</w>": 35221, "hoops</w>": 9351, "hoor": 22155, "hooray</w>": 24940, "hoos</w>": 46462, "hoosier</w>": 48886, "hoosiers</w>": 42780, "hoot</w>": 29164, "hoover</w>": 25691, "hop": 10848, "hop</w>": 5833, "hope": 5263, "hope</w>": 1683, "hoped</w>": 30628, "hopeful</w>": 21453, "hopefully</w>": 7602, "hopeless</w>": 35586, "hopes</w>": 10018, "hoping</w>": 7207, "hopkins</w>": 17821, "hopp": 48839, "hopped</w>": 34220, "hopper</w>": 21748, "hopping</w>": 27606, "hoppy</w>": 38359, "hops</w>": 21137, "hor": 1407, "hor</w>": 33847, "hora</w>": 26013, "horace</w>": 39282, "horan</w>": 26857, "horde</w>": 44947, "hore</w>": 15380, "horiz": 8144, "horizon": 17924, 
"horizon</w>": 11920, "horizons</w>": 29685, "horizontal</w>": 25775, "hormon": 27096, "hormone</w>": 31283, "hormones</w>": 35162, "horn": 15771, "horn</w>": 9607, "horne</w>": 38143, "horned</w>": 34526, "hornet</w>": 28739, "hornets</w>": 20124, "horns</w>": 22109, "horny</w>": 32622, "horo": 21500, "horoscope</w>": 38453, "horowitz</w>": 44669, "horri": 8656, "horrible</w>": 13726, "horribly</w>": 45484, "horrific</w>": 25314, "horrifying</w>": 38901, "horror": 13787, "horror</w>": 5032, "horrormovies</w>": 46682, "horrors</w>": 33321, "horse": 8562, "horse</w>": 4558, "horseback</w>": 43673, "horseman</w>": 48885, "horsepower</w>": 36882, "horser": 23096, "horseracing</w>": 30693, "horses</w>": 8809, "horseshoe</w>": 29242, "horst</w>": 37182, "hort</w>": 19482, "horticul": 27141, "horticulture</w>": 39998, "horton</w>": 25945, "hortons</w>": 38422, "horus</w>": 29794, "hos": 44320, "hos</w>": 25008, "hosa</w>": 44618, "hose</w>": 19662, "hoseok</w>": 38817, "hosp": 2847, "hosp</w>": 37853, "hospice</w>": 20533, "hospit": 7180, "hospital": 29399, "hospital</w>": 3851, "hospitality</w>": 11657, "hospitalized</w>": 36915, "hospitals</w>": 13816, "host": 17403, "host</w>": 3953, "hostage</w>": 26119, "hoste": 31700, "hosted</w>": 6017, "hostel</w>": 27225, "hostess</w>": 39692, "hostile</w>": 28074, "hosting</w>": 4857, "hosts</w>": 8718, "hot": 2851, "hot</w>": 2069, "hota</w>": 43289, "hotdog</w>": 43758, "hotel": 14591, "hotel</w>": 2738, "hotels</w>": 8654, "hotline</w>": 30516, "hotmail</w>": 46427, "hotness</w>": 39803, "hotra</w>": 27109, "hotro": 47823, "hotspot</w>": 36606, "hotspur</w>": 35176, "hotter</w>": 23591, "hottest</w>": 8279, "hottie</w>": 22804, "hotties</w>": 46027, "hou": 1011, "hou</w>": 10122, "hough</w>": 44529, "houghton</w>": 36133, "houn": 39273, "houn</w>": 33607, "hound": 33996, "hound</w>": 13561, "hounds</w>": 21178, "hounews</w>": 48373, "hour": 14930, "hour</w>": 2232, "hourly</w>": 30918, "hours</w>": 2382, "house": 4107, "house</w>": 1212, "housed</w>": 37518, "household</w>": 12412, "households</w>": 27167, "housel": 48685, "housemusic</w>": 28468, "houseof": 19928, "houses</w>": 7791, "housewives</w>": 38523, "housing": 32924, "housing</w>": 5734, "houston": 16564, "houston</w>": 5663, "hov</w>": 40291, "hove</w>": 29674, "hoven</w>": 35559, "hover": 36252, "hover</w>": 49016, "hovering</w>": 43437, "how": 7470, "how</w>": 829, "howar": 37672, "howard": 25447, "howard</w>": 7632, "howdy</w>": 42216, "howe": 8179, "howe</w>": 24614, "howell</w>": 25297, "hower</w>": 32920, "however</w>": 8467, "howi": 47883, "howie</w>": 42939, "howl</w>": 40332, "howling</w>": 41771, "howto": 38191, "howto</w>": 44060, "hoy": 39625, "hoy</w>": 13278, "hoya</w>": 40978, "hp": 23753, "hp</w>": 6371, "hpa</w>": 30983, "hpc</w>": 39936, "hpe</w>": 33787, "hpv</w>": 45765, "hq": 33571, "hq</w>": 4693, "hr": 4810, "hr</w>": 4086, "hra": 21320, "hra</w>": 17212, "hrc</w>": 18139, "hrh</w>": 29103, "hri": 21068, "hrithik</w>": 45371, "hrs</w>": 7157, "hru</w>": 24127, "hrw</w>": 25064, "hs": 9343, "hs</w>": 2466, "hsbc</w>": 31508, "hsc</w>": 43510, "hse</w>": 34057, "hsfb</w>": 29539, "hsv</w>": 47311, "ht": 11123, "ht</w>": 7801, "hta": 23452, "hta</w>": 49384, "htafc</w>": 42821, "htc": 48942, "htc</w>": 17635, "html</w>": 18231, "hts</w>": 43710, "htt": 10620, "http</w>": 15066, "https</w>": 30901, "httr</w>": 49372, "httweets</w>": 43198, "hu": 845, "hu</w>": 5949, "hua</w>": 22138, "huan</w>": 41405, "huang</w>": 32013, "huar": 46916, "huawe": 17709, "huawei</w>": 
21128, "hub": 18775, "hub</w>": 7028, "hubb": 23183, "hubbard</w>": 33288, "hubble</w>": 30421, "hubby</w>": 16947, "hubert</w>": 40699, "hubs</w>": 29327, "huck": 22909, "huckabee</w>": 43666, "hud": 7169, "hud</w>": 28563, "hudder": 22629, "huddersfield</w>": 24220, "huddle</w>": 33435, "hudson": 25873, "hudson</w>": 11260, "hue": 48380, "hue</w>": 21465, "hues</w>": 38003, "huey</w>": 39663, "huff": 18746, "huff</w>": 44999, "huffpost": 45887, "hug": 40790, "hug</w>": 10359, "huge</w>": 2699, "hugely</w>": 24648, "hugged</w>": 41333, "hugging</w>": 27058, "hugh": 8723, "hugh</w>": 15385, "hughes</w>": 11418, "hugo": 43935, "hugo</w>": 17132, "hugs</w>": 14248, "huh</w>": 13348, "huhu</w>": 32134, "hui</w>": 29978, "hul": 7911, "hula</w>": 40145, "hulk</w>": 17637, "hull": 25154, "hull</w>": 10375, "hulu</w>": 24666, "hum": 5823, "hum</w>": 16283, "human": 3175, "human</w>": 2751, "humane</w>": 20220, "humanitarian</w>": 14170, "humanities</w>": 24949, "humanity</w>": 9420, "humanright": 44385, "humanrights</w>": 14148, "humans</w>": 8324, "humb": 9988, "humber": 30602, "humber</w>": 38063, "humble": 38703, "humble</w>": 10889, "humbled</w>": 19682, "humbling</w>": 39757, "humbold": 24739, "humboldt</w>": 31389, "hume</w>": 38197, "humid": 14778, "humid</w>": 27447, "humidi": 47666, "humidity</w>": 15469, "humil": 27205, "humili": 25332, "humility</w>": 28535, "humming": 26515, "hummingbird</w>": 33072, "hummus</w>": 31785, "humor": 29369, "humor</w>": 11186, "humorous</w>": 38173, "humour</w>": 19161, "hump": 16673, "hump</w>": 24529, "humpback</w>": 47662, "humpday</w>": 27693, "humph": 19767, "humphrey</w>": 31549, "hun": 1616, "hun</w>": 10795, "hundre": 8505, "hundred</w>": 11898, "hundreds</w>": 8879, "hung</w>": 13825, "hungar": 19420, "hungarian</w>": 23325, "hungary</w>": 17232, "hunger": 25565, "hunger</w>": 10184, "hungergames</w>": 47507, "hungover</w>": 41110, "hungry": 44845, "hungry</w>": 8451, "hunk</w>": 33912, "hunt": 16498, "hunt</w>": 5774, "hunted</w>": 37373, "hunter": 16531, "hunter</w>": 6099, "hunters</w>": 16115, "hunting": 27830, "hunting</w>": 7507, "huntington</w>": 23521, "hunts</w>": 34041, "huntsville</w>": 34544, "hur": 2305, "hur</w>": 34523, "hurd</w>": 44915, "hurdle</w>": 27486, "hurdles</w>": 25440, "huri</w>": 42486, "hurley</w>": 30166, "hurling</w>": 24738, "huron</w>": 36147, "hurrah</w>": 40599, "hurric": 6543, "hurrican": 36105, "hurricane": 24051, "hurricane</w>": 8782, "hurricanes</w>": 22357, "hurry</w>": 10921, "hurst": 44742, "hurst</w>": 11760, "hurt</w>": 7413, "hurting</w>": 24017, "hurts</w>": 13059, "hus": 5111, "hus</w>": 35853, "husband</w>": 6179, "husbands</w>": 33612, "hush</w>": 28728, "husk": 19246, "huskers</w>": 26946, "huskies</w>": 20988, "husky</w>": 20421, "huss": 13733, "hussain</w>": 17940, "hussein</w>": 31336, "hust": 27279, "hustle</w>": 15709, "huston</w>": 46480, "hut": 20924, "hut</w>": 16503, "hutch": 31018, "hutch</w>": 33203, "hutchinson</w>": 35721, "hutto": 27662, "hutton</w>": 38321, "hv": 17209, "hv</w>": 18593, "hvac</w>": 27492, "hw": 27491, "hw</w>": 18876, "hwa</w>": 32352, "hwan</w>": 44390, "hwang</w>": 46775, "hwy</w>": 13812, "hy": 1441, "hy</w>": 17827, "hya</w>": 31600, "hyacin": 47263, "hyatt": 44856, "hyatt</w>": 25146, "hybri": 9084, "hybrid</w>": 10156, "hyd</w>": 42382, "hyde": 46484, "hyde</w>": 16343, "hyder": 13960, "hyderabad</w>": 14801, "hydr": 8031, "hydra": 44414, "hydra</w>": 40420, "hydrange": 43298, "hydrate</w>": 29628, "hydrated</w>": 23300, "hydrating</w>": 47653, 
"hydration</w>": 24174, "hydrau": 26017, "hydraulic</w>": 26189, "hydro": 8368, "hydro</w>": 22595, "hydrogen</w>": 20974, "hye": 32724, "hye</w>": 25792, "hygi": 16277, "hygiene</w>": 19591, "hymn</w>": 41350, "hyo": 38960, "hyo</w>": 35078, "hyp": 16964, "hype": 30353, "hype</w>": 11111, "hyped</w>": 22507, "hyper": 7997, "hyper</w>": 22146, "hypertension</w>": 40698, "hypno": 23355, "hypnosis</w>": 48138, "hypnoti": 40440, "hypo": 10252, "hypocr": 30711, "hypocri": 25606, "hypocrisy</w>": 26296, "hypocrite</w>": 44125, "hypothe": 46966, "hypothesis</w>": 44956, "hyster": 24235, "hysteria</w>": 45965, "hysterical</w>": 48627, "hyuk</w>": 20452, "hyun": 11831, "hyun</w>": 8589, "hyundai</w>": 17094, "hyung": 46901, "hyung</w>": 16551, "hz</w>": 32533, "i": 72, "i</w>": 328, "ia": 12486, "ia</w>": 1073, "iac": 32838, "iac</w>": 44063, "iaf</w>": 40789, "iah</w>": 35052, "iain</w>": 30103, "ial": 11530, "ial</w>": 1974, "ials</w>": 20940, "iam": 3579, "iam</w>": 11415, "iambic": 43668, "iambicpent</w>": 43891, "iamsrk</w>": 15103, "ian": 7723, "ian</w>": 1800, "ians</w>": 6451, "iansomerhalder</w>": 47077, "iart": 18413, "iartg</w>": 18669, "ias": 32303, "ias</w>": 14620, "ib": 3962, "ib</w>": 13554, "iba</w>": 39763, "ibadan</w>": 44691, "iban": 47145, "ibc</w>": 49014, "ibd</w>": 40732, "iber": 23814, "ibi": 12337, "ibis</w>": 47048, "ibiza</w>": 13853, "ible</w>": 37792, "ibles</w>": 44102, "ibm": 23415, "ibm</w>": 13918, "ibn</w>": 25729, "ibooks</w>": 46887, "ibra": 15476, "ibrahi": 40350, "ibrahim</w>": 20816, "ibrox</w>": 46883, "ibs</w>": 41993, "ibu": 43587, "ibu</w>": 46117, "ic": 535, "ic</w>": 1029, "ica</w>": 2576, "icago</w>": 37492, "ical": 6082, "ical</w>": 1110, "ically</w>": 3161, "icals</w>": 13999, "ican": 17653, "ican</w>": 5246, "icans</w>": 20511, "icar": 37211, "ication</w>": 21629, "icc</w>": 12945, "ice": 2739, "ice</w>": 733, "iceberg</w>": 33662, "icec": 13636, "icecream</w>": 21334, "iced</w>": 8049, "icelan": 34114, "iceland": 46716, "iceland</w>": 11935, "icelandic</w>": 34705, "ices</w>": 1931, "ich": 5333, "ich</w>": 1232, "icha": 31453, "iche": 28972, "iche</w>": 21143, "ichi": 21669, "ichi</w>": 14647, "ichick</w>": 45022, "ichiro</w>": 43787, "ici": 948, "ici</w>": 22189, "icia</w>": 11774, "icial": 17543, "icial</w>": 6397, "ician": 40522, "ician</w>": 5374, "icians</w>": 6264, "iciary</w>": 21329, "icic": 46006, "icide</w>": 6558, "icides</w>": 28253, "icing</w>": 7676, "icio</w>": 24207, "icion": 45905, "icious</w>": 3325, "icist</w>": 21165, "icists</w>": 42171, "icity</w>": 7243, "ick": 1168, "ick</w>": 1068, "icked</w>": 39799, "icker</w>": 40357, "ickers</w>": 30701, "icki</w>": 35468, "icking</w>": 6619, "icks</w>": 3727, "icky</w>": 11587, "icn</w>": 44516, "ico": 13697, "ico</w>": 3040, "icom": 17693, "icom</w>": 29796, "icon": 13843, "icon</w>": 5646, "iconic</w>": 6959, "icons</w>": 15553, "icop": 9389, "icos</w>": 32002, "ics</w>": 1324, "ict</w>": 6349, "icted</w>": 36515, "iction</w>": 40560, "icton</w>": 36548, "icu": 45118, "icu</w>": 30443, "icular</w>": 40660, "icus</w>": 31459, "icy": 28780, "icy</w>": 3495, "icymi</w>": 5315, "icz</w>": 46387, "id": 1568, "id</w>": 1014, "ida": 11032, "ida</w>": 11600, "idad</w>": 22462, "idaho": 48817, "idaho</w>": 15165, "idal</w>": 39684, "idan</w>": 17929, "idc</w>": 22386, "ide": 1909, "ide</w>": 14104, "idea</w>": 3612, "ideal</w>": 8789, "ideally</w>": 48247, "ideals</w>": 45096, "ideas</w>": 4452, "ident": 7113, "identi": 6009, "identical</w>": 25587, "identification</w>": 23337, 
"identified</w>": 15217, "identifies</w>": 35712, "identify</w>": 10949, "identifying</w>": 23589, "identities</w>": 34292, "identity</w>": 8892, "ideology</w>": 25840, "iders</w>": 8980, "ides</w>": 31791, "idf</w>": 28987, "idge</w>": 35567, "idh</w>": 44325, "idi": 9611, "idi</w>": 14264, "idio": 15994, "idiot</w>": 14087, "idiots</w>": 20856, "idk</w>": 8972, "idle</w>": 34754, "idlib</w>": 36199, "ido": 6763, "ido</w>": 29641, "idol": 24866, "idol</w>": 8884, "idols</w>": 21398, "idr</w>": 10106, "idri": 46435, "idris</w>": 41312, "ids</w>": 6111, "idu</w>": 28655, "idy": 33058, "idyl": 44879, "idyllic</w>": 46632, "ie": 6789, "ie</w>": 1718, "iec</w>": 44773, "ied</w>": 10059, "ieee</w>": 39860, "iel": 27875, "iel</w>": 22729, "ience</w>": 1542, "ient</w>": 13115, "ier": 33173, "ier</w>": 5912, "iers</w>": 45060, "ies": 27912, "ies</w>": 963, "iest</w>": 10818, "if": 8063, "if</w>": 878, "ifa</w>": 37574, "ifc</w>": 36524, "ife": 41172, "ife</w>": 19590, "iff": 35753, "ification</w>": 35755, "ified</w>": 41403, "ift": 31143, "iftar</w>": 35153, "ifu</w>": 41523, "ify</w>": 32807, "ig": 1089, "ig</w>": 3072, "iga</w>": 16493, "igan</w>": 27468, "igans</w>": 25419, "igbo</w>": 44591, "ige</w>": 10806, "igen</w>": 33070, "iger": 30758, "iger</w>": 20685, "igers": 40755, "igers</w>": 48928, "iggy": 46219, "iggy</w>": 27604, "igh": 2712, "igh</w>": 5451, "ight": 14571, "ight</w>": 897, "ighton</w>": 35292, "igi</w>": 21901, "igle": 29912, "iglesias</w>": 39432, "ign": 7303, "ign</w>": 2326, "ignati": 37573, "ignatius</w>": 48318, "igne</w>": 45843, "ignite</w>": 25210, "ignition</w>": 36115, "igno": 15375, "ignor": 7653, "ignorance</w>": 22735, "ignorant</w>": 26933, "ignore</w>": 12304, "ignored</w>": 20428, "ignores</w>": 40129, "ignoring</w>": 23969, "igor</w>": 33024, "igs</w>": 31344, "igu": 21279, "ih": 12162, "ih</w>": 34135, "ihear": 13043, "iheart": 30332, "iheartawards</w>": 18811, "iheartradio</w>": 25934, "ihop</w>": 45511, "ihri": 39108, "ihrithik</w>": 39326, "ii": 5103, "ii</w>": 2329, "iii": 46236, "iii</w>": 6572, "iiii": 20133, "iiii</w>": 45393, "iiot</w>": 30704, "iit": 39330, "iit</w>": 33238, "ij": 7337, "ija</w>": 42802, "ik": 3903, "ik</w>": 10177, "ika</w>": 18188, "ike": 12329, "ike</w>": 19696, "ikea</w>": 20528, "iker</w>": 38653, "ikh": 44655, "ikh</w>": 12758, "iklan": 32028, "iklan</w>": 29584, "iko": 35659, "iko</w>": 39272, "ikon": 38543, "ikon</w>": 19156, "iku</w>": 17780, "il": 543, "il</w>": 958, "ila</w>": 4344, "ilah</w>": 32211, "ilan": 13889, "ilan</w>": 28076, "iland</w>": 20957, "ilation</w>": 16180, "ilay": 45093, "ild": 22278, "ild</w>": 17164, "ile": 18398, "ile</w>": 989, "iled</w>": 3358, "iler": 22446, "iler</w>": 3615, "ilers</w>": 8975, "iles</w>": 42274, "ili": 2076, "ili</w>": 19601, "ilia</w>": 14855, "ilian</w>": 10272, "iliary</w>": 32585, "ilife</w>": 42835, "ilike": 44989, "ilinan</w>": 48497, "iling</w>": 3299, "ilio</w>": 47256, "ilion</w>": 12561, "ilis</w>": 43442, "ilit": 11178, "ilities</w>": 5446, "ility</w>": 1787, "ilive</w>": 26478, "ill": 828, "ill</w>": 660, "illa": 8877, "illa</w>": 3043, "illac</w>": 17218, "illage</w>": 48922, "illard</w>": 21920, "illary</w>": 33667, "illas</w>": 23404, "ille": 18213, "ille</w>": 5559, "illed</w>": 2527, "illeg": 35808, "illegal</w>": 7983, "illegally</w>": 24466, "illegals</w>": 40490, "iller": 23341, "iller</w>": 2956, "illers</w>": 30547, "illery</w>": 14514, "illes</w>": 20037, "illi": 1086, "illi</w>": 25187, "illia</w>": 48776, "illiams</w>": 30301, "illian": 48775, 
"illian</w>": 17355, "illic": 37152, "illicit</w>": 40998, "illie</w>": 26083, "illin</w>": 35868, "illing</w>": 2803, "illini</w>": 28957, "illino": 8920, "illinois</w>": 9414, "illion": 35542, "illion</w>": 2035, "illness</w>": 11145, "illnesses</w>": 33861, "illo": 34153, "illo</w>": 7588, "illon</w>": 20516, "ills</w>": 1900, "illu": 3025, "illumin": 11446, "illuminate</w>": 43261, "illuminated</w>": 28814, "illuminati</w>": 34551, "illuminating</w>": 46601, "illumination</w>": 43680, "illus</w>": 41386, "illusion</w>": 20318, "illusions</w>": 47429, "illustr": 6268, "illustrate</w>": 37468, "illustrated</w>": 13151, "illustrates</w>": 38129, "illustrating</w>": 43322, "illustration</w>": 6052, "illustrations</w>": 17852, "illustrator</w>": 16649, "illustri": 43116, "illustrious</w>": 44304, "illy": 11707, "illy</w>": 9532, "ilm</w>": 36326, "ilo": 4220, "ilo</w>": 14835, "ilove": 7183, "ilove</w>": 32914, "iloveart</w>": 41114, "ilovemy": 28863, "iloveyou</w>": 28829, "ils</w>": 1543, "ilt</w>": 25334, "ilton</w>": 28494, "ilu": 27337, "ilwx</w>": 43777, "ily": 4881, "ily</w>": 1026, "ilya</w>": 33377, "ilysm</w>": 29228, "im": 732, "im</w>": 1496, "ima": 2414, "ima</w>": 6432, "imac</w>": 40675, "imacele": 47281, "imag": 2316, "image": 24101, "image</w>": 2867, "imagery</w>": 22828, "images</w>": 4952, "imagin": 18178, "imaginary</w>": 30417, "imagination</w>": 13783, "imaginative</w>": 47233, "imagine": 35752, "imagine</w>": 4826, "imagined</w>": 18478, "imagines</w>": 47379, "imaging</w>": 14231, "imagining</w>": 27384, "imam": 37552, "imam</w>": 19024, "iman": 45684, "iman</w>": 16247, "imation</w>": 44566, "imax</w>": 32066, "imc</w>": 45616, "imdanielpadilla</w>": 36357, "imdb</w>": 30407, "ime": 44937, "ime</w>": 31151, "imel": 31594, "iment</w>": 37157, "imer</w>": 21802, "imes</w>": 47744, "imf</w>": 28403, "img</w>": 24157, "imi</w>": 23559, "imin": 23942, "imit": 23462, "imitation</w>": 41630, "imma</w>": 19487, "immac": 25085, "immaculate</w>": 29649, "immature</w>": 45531, "immedi": 7366, "immediate</w>": 14440, "immediately</w>": 10108, "immen": 17278, "immense</w>": 22722, "immensely</w>": 35013, "immer": 13954, "immerse</w>": 46240, "immersion</w>": 31861, "immersive</w>": 27521, "immigr": 5851, "immigrant</w>": 16474, "immigrants</w>": 14460, "immigration</w>": 9588, "imminent</w>": 27299, "immort": 39244, "immortal</w>": 24717, "immun": 8961, "immune</w>": 15606, "immuni": 44571, "immunity</w>": 26254, "immuno": 24361, "immunology</w>": 44483, "immunotherapy</w>": 39185, "imo": 26349, "imo</w>": 13738, "imp": 3335, "imp</w>": 31037, "impac": 7573, "impact": 33036, "impact</w>": 3844, "impacted</w>": 21424, "impactful</w>": 41631, "impacting</w>": 29359, "impacts</w>": 15069, "impair": 36451, "impaired</w>": 28028, "impairment</w>": 44501, "impala</w>": 36641, "impe": 23612, "impeach": 16874, "impeach</w>": 43497, "impeachment</w>": 32979, "impeachtrump</w>": 38006, "impecc": 34511, "impeccable</w>": 40111, "impending</w>": 34486, "imper": 7727, "imperative</w>": 39833, "imperfect</w>": 46034, "imperi": 30911, "imperial": 32425, "imperial</w>": 12361, "imperialism</w>": 48855, "imperson": 25551, "implant</w>": 33106, "implants</w>": 32202, "imple": 7423, "implement</w>": 17966, "implementation</w>": 15102, "implemented</w>": 24315, "implementing</w>": 22862, "implic": 15269, "implications</w>": 19229, "implo": 40337, "impo": 45704, "import": 2336, "import</w>": 16294, "importance</w>": 6821, "important</w>": 2829, "importantly</w>": 21580, "imported</w>": 28798, 
"imports</w>": 25286, "impose</w>": 35879, "imposed</w>": 25871, "imposing</w>": 42289, "impossible</w>": 9815, "impre": 3763, "impress</w>": 20015, "impressed</w>": 9689, "impression</w>": 14468, "impressionism</w>": 36114, "impressionist</w>": 44904, "impressions</w>": 22276, "impressive</w>": 6634, "imprint</w>": 43863, "imprison": 22141, "imprisoned</w>": 32999, "imprisonment</w>": 39024, "impro": 2531, "impromp": 28100, "impromptu</w>": 28611, "improv</w>": 22868, "improve</w>": 4971, "improved</w>": 9446, "improvement</w>": 10790, "improvements</w>": 16320, "improves</w>": 18035, "improving</w>": 10381, "improvis": 32343, "improvised</w>": 40886, "impulse</w>": 29683, "impy</w>": 42690, "imran": 19647, "imran</w>": 19212, "imrankhan": 25956, "imrankhanpti</w>": 26688, "ims</w>": 17800, "imsa</w>": 37262, "imv": 35731, "imvkohli</w>": 37136, "imwith": 26822, "imwithher</w>": 32651, "in": 512, "in</w>": 530, "ina": 18026, "ina</w>": 1366, "inability</w>": 47517, "inaccurate</w>": 49192, "inaction</w>": 41916, "inactive</w>": 49274, "inadequate</w>": 43403, "inak": 46549, "inal</w>": 19178, "inals</w>": 26438, "inan</w>": 26204, "inappropriate</w>": 26722, "inari</w>": 48620, "inary</w>": 11337, "inas": 36731, "inas</w>": 12362, "inated</w>": 38530, "ination</w>": 4706, "inau": 10832, "inaugu": 11309, "inaugur": 11448, "inaugural</w>": 11340, "inaugurated</w>": 29011, "inauguration</w>": 16805, "inbound</w>": 24420, "inbox</w>": 18683, "inc": 14570, "inc</w>": 4438, "incan": 45964, "incar": 18070, "incarcer": 26334, "incarcerated</w>": 49178, "incarceration</w>": 39887, "incase</w>": 30463, "ince</w>": 44303, "incen": 13259, "incense</w>": 35059, "incentive</w>": 29024, "incentives</w>": 29813, "inception</w>": 36653, "inch</w>": 6523, "incheon</w>": 30645, "inches</w>": 10809, "inci": 5747, "incidence</w>": 43371, "incident</w>": 10103, "incidents</w>": 22120, "incindia</w>": 26161, "inciner": 46434, "incl": 27857, "incl</w>": 13338, "inclined</w>": 45470, "inclu": 1738, "include</w>": 5942, "included</w>": 7414, "includes</w>": 6197, "including</w>": 2814, "inclusion</w>": 12079, "inclusive</w>": 13393, "income</w>": 8044, "incoming</w>": 15416, "incomparable</w>": 36027, "incompetent</w>": 45069, "incomplete</w>": 34040, "incon": 42372, "inconvenience</w>": 40563, "incorpor": 19335, "incorporate</w>": 34168, "incorporated</w>": 29494, "incorporating</w>": 40303, "incorrect</w>": 31872, "incre": 1870, "increase</w>": 5230, "increased</w>": 9156, "increases</w>": 13797, "increasing</w>": 10270, "increasingly</w>": 16106, "incredi": 2883, "incredible": 22128, "incredible</w>": 3457, "incredibleindia</w>": 24680, "incredibles</w>": 48641, "incredibly</w>": 9513, "incu": 38830, "incub": 24587, "incubator</w>": 35736, "incumb": 32246, "incumbent</w>": 38038, "incur": 42356, "ind": 5386, "ind</w>": 4655, "inda</w>": 15710, "inde": 2645, "indeed</w>": 10031, "indefin": 29501, "indefinitely</w>": 43750, "independ": 4147, "independence": 23117, "independence</w>": 7955, "independenceday</w>": 25971, "independent": 33844, "independent</w>": 7088, "independently</w>": 39831, "inder</w>": 29225, "index": 35209, "index</w>": 9458, "indhoven</w>": 44229, "indi": 1098, "indi</w>": 46536, "india": 27067, "india</w>": 1762, "indian": 7685, "indian</w>": 3606, "indiana</w>": 8615, "indianapolis</w>": 17196, "indianfootball</w>": 45979, "indians</w>": 10271, "indic": 7136, "indicate</w>": 26679, "indicated</w>": 39416, "indicates</w>": 29412, "indication</w>": 38539, "indicator</w>": 24776, 
"indicators</w>": 30054, "indicted</w>": 34992, "indictment</w>": 42278, "indie": 5260, "indie</w>": 9383, "indiedev</w>": 10863, "indiefilm</w>": 22588, "indiegame</w>": 17969, "indiegamedev</w>": 40466, "indiegames</w>": 35864, "indiegogo</w>": 38057, "indies</w>": 23618, "indiffe": 41372, "indigen": 8348, "indigenous</w>": 9303, "indigo</w>": 21002, "indira</w>": 43887, "indirec": 26398, "indirect</w>": 35416, "indivi": 5649, "individu": 9574, "individual</w>": 8512, "individually</w>": 33782, "individuals</w>": 11990, "indo": 26303, "indo</w>": 18297, "indom": 42926, "indone": 6180, "indonesia</w>": 7229, "indonesian</w>": 19593, "indoor": 44478, "indoor</w>": 9546, "indoors</w>": 22973, "indore</w>": 46143, "indu": 2298, "induc": 7973, "induced</w>": 24103, "inducted</w>": 20596, "inductee</w>": 39558, "inductees</w>": 44796, "induction</w>": 18338, "indul": 19402, "indulg": 28388, "indulge</w>": 24851, "indulgence</w>": 40856, "indulgent</w>": 49147, "industri": 5082, "industrial": 30853, "industrial</w>": 7520, "industries</w>": 11700, "industry": 47407, "industry</w>": 3318, "indv": 16942, "indy": 9821, "indy</w>": 10098, "indycar</w>": 20484, "indyref</w>": 22569, "ine": 855, "ine</w>": 715, "ineau</w>": 38122, "inec</w>": 45214, "ined</w>": 2038, "inee": 43252, "inee</w>": 7986, "inees</w>": 13056, "ineffe": 47202, "inely</w>": 18234, "inem</w>": 48876, "inema</w>": 29232, "inen</w>": 44365, "inequalities</w>": 45507, "inequality</w>": 17372, "iner": 17438, "iner</w>": 5155, "iners</w>": 41863, "ines</w>": 2137, "inese</w>": 35966, "iness</w>": 1463, "inet</w>": 8121, "inette</w>": 38911, "inev": 19527, "inevit": 45871, "inevitable</w>": 25004, "inews</w>": 24300, "inexpensive</w>": 38614, "iney</w>": 30254, "inez</w>": 12700, "inf": 1529, "inf</w>": 35241, "infamous</w>": 18688, "infan": 17219, "infant</w>": 19192, "infantry</w>": 21655, "infants</w>": 34726, "infe": 7164, "infec": 26088, "infected</w>": 26136, "infection</w>": 14774, "infections</w>": 22227, "infectious</w>": 29157, "infeld</w>": 25035, "infer": 16258, "inferno</w>": 31290, "infertility</w>": 40701, "infield</w>": 48933, "infiltr": 28683, "infin": 6246, "infinite</w>": 12748, "infiniti</w>": 34644, "infinity": 34863, "infinity</w>": 12895, "infl": 7627, "inflam": 16080, "inflammation</w>": 24893, "inflammatory</w>": 26831, "inflatable</w>": 30135, "inflation</w>": 17497, "inflicted</w>": 48188, "influ": 4835, "influen": 13229, "influence</w>": 9199, "influenced</w>": 21183, "influencer</w>": 25013, "influencers</w>": 29891, "influences</w>": 24926, "influencing</w>": 45126, "influential</w>": 17553, "influenza</w>": 39897, "info": 5680, "info</w>": 2222, "infographic</w>": 10076, "infographics</w>": 33172, "infor": 31773, "inform": 10241, "inform</w>": 19449, "informal</w>": 25705, "informat": 29625, "informatics</w>": 35685, "information</w>": 3204, "informative</w>": 19364, "informed</w>": 13876, "informing</w>": 45388, "informs</w>": 48440, "infosec</w>": 17863, "infr": 29718, "infra": 7312, "infra</w>": 45877, "infrared</w>": 22867, "infrastructure</w>": 9034, "infringe": 44882, "infringement</w>": 48712, "infront</w>": 37668, "infu": 15048, "infuri": 48461, "infused</w>": 21461, "infusion</w>": 43464, "ing": 653, "ing</w>": 519, "inga</w>": 15233, "ingco</w>": 40444, "ingday</w>": 16561, "ingdon</w>": 38731, "inge": 11790, "inge</w>": 7071, "inged</w>": 30046, "ingen</w>": 19088, "ingeni": 36884, "inger": 33883, "inger</w>": 3541, "ingfor": 33430, "ingh": 9170, "ingh</w>": 30495, "ingham": 24497, 
"ingham</w>": 4291, "inghamshire</w>": 39289, "inghour</w>": 42728, "inging</w>": 4066, "ingl": 45662, "ingle": 22228, "ingle</w>": 17005, "ingles</w>": 24490, "ingley</w>": 44428, "inglis</w>": 46327, "ingly</w>": 4796, "ingnow</w>": 34766, "ingo": 30175, "ingo</w>": 9012, "ingra": 45165, "ingrad</w>": 44124, "ingram</w>": 26998, "ingredi": 9272, "ingredient</w>": 19799, "ingredients</w>": 11788, "ingrid</w>": 33496, "ings</w>": 895, "ingthe": 20170, "ingtips</w>": 39373, "ington": 11846, "ington</w>": 2156, "ingu": 8714, "ingual</w>": 22795, "ingue</w>": 36838, "ingui": 12788, "inguish</w>": 36146, "inha</w>": 32612, "inhabit": 36189, "inhabitants</w>": 44968, "inhal": 30786, "inhe": 32617, "inher": 24611, "inherent</w>": 47327, "inherit": 34322, "inheritance</w>": 39341, "inherited</w>": 39111, "inhi": 25557, "inhibit": 32196, "inho</w>": 12984, "ini": 6154, "ini</w>": 3581, "inian</w>": 36638, "inim": 38717, "inindia</w>": 34021, "ining</w>": 1389, "inist</w>": 30976, "init</w>": 42670, "initi": 4580, "initial</w>": 13980, "initially</w>": 28123, "initials</w>": 48794, "initiated</w>": 27756, "initiation</w>": 41009, "initiative</w>": 8152, "initiatives</w>": 16549, "inity</w>": 22126, "inj": 5112, "injec": 13688, "injection</w>": 21438, "inju": 5006, "injured</w>": 7505, "injuries</w>": 9481, "injury</w>": 6223, "injustice</w>": 20541, "ink": 4547, "ink</w>": 967, "inka</w>": 40685, "inked</w>": 29356, "inki": 46176, "inkigayo</w>": 47882, "inking</w>": 37586, "inks</w>": 20966, "inktober</w>": 9387, "inland</w>": 21943, "inlet</w>": 35161, "inline</w>": 45004, "inlove</w>": 28415, "inmate</w>": 32341, "inmates</w>": 28216, "inmy": 42657, "inn": 27260, "inn</w>": 5569, "inna</w>": 35088, "inner": 24512, "inner</w>": 6955, "inning</w>": 4415, "innings</w>": 11580, "innis": 44059, "inno": 7961, "innocence</w>": 26383, "innocent</w>": 11241, "innov": 2890, "innovate</w>": 24549, "innovation": 33063, "innovation</w>": 4272, "innovations</w>": 18817, "innovative</w>": 8494, "innovator</w>": 34735, "innovators</w>": 27834, "ino": 4211, "ino</w>": 2691, "inoa</w>": 25649, "inos</w>": 21828, "inous</w>": 47801, "inox</w>": 22698, "input</w>": 16952, "inputs</w>": 48763, "inqu": 10628, "inqui": 18527, "inquirer</w>": 45172, "inquiries</w>": 29469, "inquiry</w>": 15865, "inquis": 31171, "inr</w>": 36325, "ins": 12786, "ins</w>": 1041, "insan": 7875, "insane</w>": 10260, "insanely</w>": 27846, "insanity</w>": 26645, "inscribed</w>": 49168, "inscription</w>": 41127, "insec": 15744, "insect</w>": 21297, "insects</w>": 18714, "insecure</w>": 35112, "insecurity</w>": 36964, "inser": 13830, "insert</w>": 18807, "insi": 3453, "inside": 19141, "inside</w>": 2912, "insider</w>": 13300, "insiders</w>": 32171, "insig": 40503, "insight</w>": 8795, "insightful</w>": 20354, "insights</w>": 8729, "insignia</w>": 48864, "insist</w>": 35504, "insisted</w>": 40423, "insists</w>": 27255, "inski</w>": 32630, "insky</w>": 24607, "insol": 42366, "insom": 21755, "insomni": 42040, "insomnia</w>": 30598, "inson</w>": 21007, "insp": 1597, "inspec": 7915, "inspect</w>": 40815, "inspecting</w>": 40565, "inspection</w>": 15142, "inspections</w>": 39513, "inspector</w>": 20514, "inspir": 2573, "inspiration</w>": 4195, "inspirational": 41936, "inspirational</w>": 9855, "inspirations</w>": 35093, "inspire": 27901, "inspire</w>": 8583, "inspired": 39849, "inspired</w>": 3516, "inspires</w>": 17245, "inspiring": 41847, "inspiring</w>": 5705, "inspo</w>": 26897, "inst": 1264, "inst</w>": 1581, "insta": 22411, "insta</w>": 11694, 
"instability</w>": 41377, "instac": 46678, "instaf": 33800, "instag": 14612, "instagood</w>": 23718, "instagram": 27910, "instagram</w>": 2659, "instal": 38805, "install": 6940, "install</w>": 11168, "installation</w>": 9358, "installations</w>": 27909, "installed</w>": 8807, "installing</w>": 18301, "installment</w>": 25315, "installs</w>": 45568, "instalment</w>": 47766, "instance</w>": 34572, "instant": 38810, "instant</w>": 10635, "instantly</w>": 17703, "instap": 23758, "instapic</w>": 34378, "instaweather": 43078, "instaweatherpro</w>": 43150, "inste": 3571, "instead</w>": 4191, "instein</w>": 13421, "instem</w>": 27030, "instin": 23382, "instinct</w>": 30544, "institu": 4257, "institute</w>": 5861, "institutes</w>": 43674, "institution</w>": 18823, "institutional</w>": 27442, "institutions</w>": 15207, "instore</w>": 41679, "instru": 4544, "instruc": 19648, "instruction</w>": 19407, "instructional</w>": 31022, "instructions</w>": 17040, "instructor</w>": 16087, "instructors</w>": 31998, "instrument": 42196, "instrument</w>": 15806, "instrumental</w>": 23041, "instruments</w>": 14793, "instyle</w>": 41321, "insu": 8805, "insul": 9615, "insulated</w>": 42051, "insulation</w>": 28194, "insulin</w>": 29311, "insult</w>": 26673, "insulting</w>": 39646, "insults</w>": 40451, "insur": 5024, "insurance</w>": 5870, "insured</w>": 31321, "insurers</w>": 43142, "insurtech</w>": 28716, "int": 1828, "int</w>": 1207, "inta</w>": 38314, "intact</w>": 26870, "intake</w>": 19539, "intan": 47695, "inte": 1598, "inte</w>": 41900, "intech</w>": 26504, "inted</w>": 6147, "integr": 5151, "integral</w>": 27018, "integrate</w>": 25735, "integrated</w>": 12797, "integrating</w>": 31555, "integration</w>": 12583, "integrity</w>": 14791, "intel": 11778, "intel</w>": 11426, "intellec": 13281, "intellect</w>": 47828, "intellectu": 31966, "intellectual</w>": 18069, "intelli": 5324, "intellig": 5632, "intelligence</w>": 6846, "intelligent</w>": 14063, "inten": 2967, "intend</w>": 36674, "intended</w>": 16812, "intense</w>": 10258, "intensi": 22928, "intensity</w>": 19956, "intensive</w>": 21049, "intent</w>": 18881, "intention</w>": 26786, "intentional</w>": 29536, "intentionally</w>": 31215, "intentions</w>": 26710, "inter": 1006, "inter</w>": 10093, "interact</w>": 21736, "interacting</w>": 35045, "interaction</w>": 17650, "interactions</w>": 22162, "interactive</w>": 9456, "intercep": 23676, "interception</w>": 48762, "interceptions</w>": 45313, "interchange</w>": 34222, "intercontinental</w>": 31983, "interdisciplinary</w>": 38132, "intere": 2008, "interest</w>": 5095, "interested</w>": 4620, "interesting</w>": 3628, "interests</w>": 16425, "interface</w>": 18753, "interfaith</w>": 38399, "interference</w>": 29099, "interim</w>": 19509, "interior": 10700, "interior</w>": 7305, "interiordesign</w>": 12902, "interiors</w>": 14836, "intermedi": 20246, "intermediate</w>": 24304, "intermission</w>": 44805, "intermitt": 44946, "intern": 9976, "intern</w>": 14068, "internal</w>": 11285, "internally</w>": 41134, "internation": 42534, "international": 8566, "international</w>": 2436, "internationaldayof": 41518, "internationally</w>": 24059, "internationalwomensday</w>": 17682, "interne": 32713, "internet": 30180, "internet</w>": 4757, "internetof": 44449, "internetofthings</w>": 45925, "interns</w>": 19902, "internship</w>": 16661, "internships</w>": 39410, "interoper": 45754, "interpre": 11162, "interpret": 49154, "interpret</w>": 40459, "interpretation</w>": 20652, "interpreted</w>": 42157, "interpreting</w>": 
46525, "interro": 29548, "interrup": 21609, "interrupt</w>": 48449, "interrupted</w>": 30288, "intersec": 45246, "intersection</w>": 19210, "interstate</w>": 21963, "interstellar</w>": 41506, "interval</w>": 36032, "intervals</w>": 44884, "interven": 18245, "intervention</w>": 16804, "interventions</w>": 28848, "interview</w>": 2885, "interviewed</w>": 11688, "interviewing</w>": 16399, "interviews</w>": 9910, "intestin": 37938, "intestinal</w>": 38896, "inthe": 7486, "inti": 14459, "intim": 38832, "intimacy</w>": 46430, "intimate</w>": 16382, "intimid": 24041, "intimidating</w>": 44405, "intimidation</w>": 49258, "inting</w>": 15571, "intl": 38186, "intl</w>": 14224, "intment</w>": 9020, "intments</w>": 21420, "into": 35235, "into</w>": 1095, "intoler": 28534, "intolerance</w>": 37808, "intothe": 38511, "intra": 20922, "intrac": 46195, "intram": 40956, "intre": 29397, "intrepid</w>": 39127, "intri": 15421, "intric": 23763, "intricate</w>": 29616, "intrigu": 18856, "intrigue</w>": 45140, "intrigued</w>": 40034, "intriguing</w>": 24334, "intrin": 45181, "intro": 2999, "intro</w>": 13224, "introduc": 3621, "introduce</w>": 9813, "introduced</w>": 10446, "introduces</w>": 12933, "introducing</w>": 6256, "introduction</w>": 11812, "introductory</w>": 38121, "intru": 22949, "ints</w>": 2514, "intu": 17225, "intuition</w>": 40897, "intuitive</w>": 35224, "inu": 21131, "inuit</w>": 41250, "inus</w>": 45857, "inv": 2279, "inv</w>": 43786, "inva": 10084, "invade</w>": 34609, "invaded</w>": 32596, "invaders</w>": 35188, "invading</w>": 40101, "invali": 31592, "invalid</w>": 46998, "invaluable</w>": 33976, "invasi": 38100, "invasion</w>": 13378, "invasive</w>": 19554, "inve": 2024, "inven": 26233, "invent": 11665, "invent</w>": 23558, "invented</w>": 14100, "invention</w>": 23607, "inventions</w>": 44914, "inventor</w>": 22836, "inventory</w>": 19444, "inver": 12061, "inverness</w>": 33080, "inverte": 46397, "inverted</w>": 40709, "invest": 4180, "invest</w>": 9716, "invested</w>": 22536, "investig": 4626, "investigate</w>": 15703, "investigated</w>": 29180, "investigates</w>": 29621, "investigating</w>": 13713, "investigation</w>": 8194, "investigations</w>": 24020, "investigative</w>": 30233, "investigator</w>": 30528, "investigators</w>": 24121, "investin": 40195, "investing</w>": 10554, "investment</w>": 5605, "investments</w>": 14675, "investor</w>": 15490, "investors</w>": 10486, "invests</w>": 38378, "invic": 25253, "invigor": 48722, "invin": 30252, "invincible</w>": 38052, "invisible</w>": 16093, "invit": 12454, "invitation</w>": 15032, "invitational</w>": 14511, "invitations</w>": 40120, "invite</w>": 8109, "invited</w>": 7731, "invites</w>": 16034, "inviting</w>": 14349, "invo": 29417, "invol": 4000, "involve</w>": 26325, "involved</w>": 5320, "involvement</w>": 19502, "involves</w>": 22652, "involving</w>": 14786, "inwx</w>": 35674, "iny</w>": 23257, "inyour": 47954, "io": 3167, "io</w>": 3752, "ioc</w>": 43018, "iom": 33000, "iom</w>": 31135, "ion": 14871, "ion</w>": 3668, "ions</w>": 26289, "ior": 7354, "ior</w>": 2498, "iority</w>": 46016, "iors</w>": 6427, "ios</w>": 6614, "iot": 32694, "iot</w>": 6627, "iota</w>": 37294, "ious</w>": 6994, "iously</w>": 38233, "iow": 7439, "iowa": 38847, "iowa</w>": 8290, "ip": 1719, "ip</w>": 8600, "ipa</w>": 11199, "ipad": 39067, "ipad</w>": 7491, "ipads</w>": 35281, "ipc</w>": 41981, "iphone": 26030, "iphone</w>": 4314, "iphones</w>": 37561, "ipl</w>": 13440, "ipment</w>": 37824, "ipo": 40218, "ipo</w>": 24090, "ipod</w>": 17889, "ipp": 31706, 
"ips</w>": 26910, "ipsw": 22221, "ipswich</w>": 24494, "iq": 15554, "iq</w>": 19996, "iqbal</w>": 33553, "ir": 582, "ir</w>": 742, "ira": 4923, "ira</w>": 5371, "irah</w>": 35724, "iran": 19273, "iran</w>": 5075, "irandeal</w>": 46533, "irani</w>": 37984, "iranian</w>": 14158, "iraq</w>": 8543, "iraqi</w>": 18617, "irc</w>": 41527, "ird</w>": 2770, "ire": 3013, "ire</w>": 1454, "ired": 32728, "ired</w>": 2995, "ireland": 32806, "ireland</w>": 4157, "irene</w>": 21600, "ires</w>": 12435, "irez</w>": 21581, "irgc</w>": 47942, "iri": 2155, "iri</w>": 13880, "irical</w>": 33366, "irie</w>": 42979, "irina</w>": 46664, "iring</w>": 10169, "iris</w>": 16437, "irish": 9386, "irish</w>": 4889, "irl": 34494, "irl</w>": 8570, "irling</w>": 26493, "irls</w>": 24344, "irma</w>": 22406, "irn</w>": 42603, "iro": 23209, "iro</w>": 7280, "iron": 7699, "iron</w>": 5391, "ironic</w>": 24518, "ironically</w>": 36779, "ironing</w>": 46655, "ironman</w>": 20330, "irons</w>": 30032, "irony</w>": 20681, "irport</w>": 27769, "irr": 24641, "irrational</w>": 47413, "irregular</w>": 38692, "irrelevant</w>": 34677, "irresi": 31200, "irresistible</w>": 35252, "irresponsible</w>": 44714, "irri": 21484, "irrigation</w>": 23761, "irrit": 24218, "irs</w>": 6086, "irst</w>": 32701, "iru</w>": 48206, "irvin</w>": 47053, "irvine</w>": 24201, "irving</w>": 19738, "irwin</w>": 23750, "iry</w>": 7239, "is": 595, "is</w>": 533, "isa": 11034, "isa</w>": 6536, "isaac": 37544, "isaac</w>": 13659, "isab": 13357, "isabel</w>": 27466, "isabella</w>": 26192, "isabelle</w>": 31072, "isable</w>": 46631, "isai": 15365, "isaiah</w>": 17952, "isak": 40619, "isance</w>": 46893, "isation</w>": 7194, "isback</w>": 43811, "isc</w>": 39316, "isch</w>": 47888, "isco</w>": 5736, "iscoming</w>": 26458, "isd": 46816, "isd</w>": 12002, "ise": 7669, "ise</w>": 1479, "ised</w>": 2861, "iselle</w>": 48491, "iser": 23080, "iser</w>": 5626, "isers</w>": 34879, "ises</w>": 5153, "isf</w>": 44036, "isgreat</w>": 34595, "ish": 6844, "ish</w>": 1061, "isha</w>": 28050, "ishable</w>": 37949, "ished</w>": 35341, "ishere</w>": 46053, "ishi": 26224, "ishq": 27996, "ishqba": 32503, "ishqbaaaz</w>": 36591, "isi": 7233, "isi</w>": 17880, "isil</w>": 34636, "isin</w>": 37676, "ising</w>": 3426, "isis</w>": 7531, "isk</w>": 30171, "isl</w>": 31368, "isla</w>": 22807, "islam": 6003, "islam</w>": 8770, "islamabad</w>": 19959, "islamic": 31627, "islamic</w>": 9552, "islamist</w>": 38798, "islamophobia</w>": 43459, "island": 13408, "island</w>": 2619, "islander</w>": 45651, "islanders</w>": 27804, "islands</w>": 7145, "islay</w>": 49279, "isle": 19082, "isle</w>": 11849, "isleof": 24718, "isles</w>": 21816, "islife</w>": 26433, "islington</w>": 34945, "ism": 47730, "ism</w>": 1935, "isma": 43937, "ismail</w>": 36140, "isme</w>": 43570, "ismo</w>": 41926, "isms</w>": 18700, "isn</w>": 2923, "isner</w>": 48246, "isnow</w>": 43694, "isnt</w>": 19416, "iso": 2462, "iso</w>": 12263, "isol": 11414, "isolated</w>": 19044, "isolation</w>": 26400, "ison": 12949, "ison</w>": 4553, "isons</w>": 33318, "isoo</w>": 35857, "isp": 31397, "isp</w>": 39041, "isra": 3591, "israel": 20837, "israel</w>": 4779, "israeli</w>": 8994, "israelis</w>": 45713, "isreal</w>": 47147, "isro</w>": 44841, "iss": 11738, "iss</w>": 4950, "issa": 38579, "issa</w>": 7560, "issan": 49358, "issance</w>": 40828, "issant</w>": 38828, "isse</w>": 18986, "ission</w>": 37946, "issu": 2049, "issue</w>": 3202, "issued</w>": 9246, "issues</w>": 4082, "issuing</w>": 37226, "ist": 9751, "ist</w>": 2304, "istanbul</w>": 
12258, "istandwith": 33820, "iste</w>": 32563, "ister</w>": 14555, "isthe": 46748, "istic</w>": 29556, "ists</w>": 8426, "isu": 17030, "isu</w>": 23328, "it": 529, "it</w>": 585, "ita": 36920, "ita</w>": 2864, "itable</w>": 8915, "ital": 2306, "ital</w>": 1660, "itali": 11644, "italia</w>": 11025, "italian": 20264, "italian</w>": 5175, "italians</w>": 44744, "italk</w>": 32894, "italy</w>": 4052, "itan</w>": 18383, "itans</w>": 40711, "itar</w>": 47161, "itarian</w>": 11599, "itary</w>": 17604, "itas": 31634, "itas</w>": 13436, "itate</w>": 42457, "itated</w>": 36744, "itation</w>": 5070, "itative</w>": 22892, "itc</w>": 36449, "itch": 2387, "itch</w>": 8147, "itchen</w>": 32664, "itchy</w>": 41980, "ite": 2732, "ite</w>": 802, "iteam</w>": 37828, "itec": 3099, "itec</w>": 43936, "itech": 44215, "itech</w>": 23040, "ited": 8603, "ited</w>": 1108, "itel</w>": 44638, "itely</w>": 4605, "item</w>": 8532, "items</w>": 6207, "iter": 7938, "iter</w>": 19773, "iteracy</w>": 39634, "iterate</w>": 43106, "iteration</w>": 38790, "ites</w>": 2454, "itez</w>": 42131, "itf</w>": 35436, "itfc</w>": 36519, "ith": 6133, "ith</w>": 1757, "ithaca</w>": 46257, "iti": 760, "iti</w>": 6165, "itia</w>": 22634, "itian</w>": 23365, "itic</w>": 11950, "itical</w>": 48767, "itics</w>": 33967, "ities": 41423, "ities</w>": 1480, "itim": 15676, "itiner": 32803, "itinerary</w>": 41564, "iting</w>": 1257, "ition": 25263, "ition</w>": 1104, "itions</w>": 5540, "itious</w>": 13329, "itis": 33539, "itis</w>": 8388, "itive</w>": 3067, "itly</w>": 42240, "ito": 22167, "ito</w>": 4661, "iton</w>": 21119, "itor": 47267, "itor</w>": 4584, "itors</w>": 22005, "itos</w>": 24560, "its": 7140, "its</w>": 902, "itsa": 45032, "itself</w>": 7290, "itsme": 41125, "itss": 47040, "itt": 1031, "itt</w>": 11228, "itta</w>": 21233, "itte</w>": 31962, "itted</w>": 24429, "itten": 30014, "itten</w>": 4343, "itter</w>": 11456, "itters</w>": 13082, "itti</w>": 28629, "ittin</w>": 25646, "itting</w>": 3147, "ittle": 24208, "ittle</w>": 21366, "ittles</w>": 38989, "itton</w>": 25707, "itty</w>": 35096, "itu": 1668, "itu</w>": 32128, "itude": 43382, "itude</w>": 5012, "itudes</w>": 20459, "itunes</w>": 7007, "itup</w>": 35838, "iture</w>": 25547, "itus</w>": 24364, "itutes</w>": 32883, "itv": 20159, "itv</w>": 12805, "ity": 2480, "ity</w>": 696, "itya</w>": 32055, "itz": 14544, "itz</w>": 7807, "iu": 14292, "iu</w>": 15575, "ium</w>": 10762, "ius</w>": 6740, "iv": 6775, "iv</w>": 9315, "iva</w>": 42463, "ivan": 15544, "ivan</w>": 15689, "ivanka</w>": 37914, "ive": 26885, "ive</w>": 8653, "ived</w>": 15654, "iver": 36849, "iver</w>": 44254, "ives</w>": 27333, "ivf</w>": 39159, "iving</w>": 45136, "ivory</w>": 16776, "ivote": 45835, "ivy": 36939, "ivy</w>": 16045, "iw": 13058, "iw</w>": 46604, "iwant": 42747, "iwd</w>": 16815, "iwm</w>": 44237, "ix": 13272, "ix</w>": 8756, "iy": 13704, "iya</w>": 18595, "iyaki</w>": 48395, "iz": 2845, "iz</w>": 8407, "iza</w>": 37704, "ization</w>": 10847, "ize</w>": 10885, "ized</w>": 7690, "izen</w>": 34776, "izer</w>": 23895, "izes</w>": 45434, "izing</w>": 17354, "izo</w>": 46910, "izz": 31779, "izz</w>": 46128, "izzy</w>": 28861, "j": 73, "j</w>": 329, "ja": 1586, "ja</w>": 2641, "jaan</w>": 25052, "jab": 8059, "jab</w>": 9439, "jac": 2293, "jac</w>": 30198, "jace</w>": 43286, "jack": 2679, "jack</w>": 3267, "jacked</w>": 27923, "jacket</w>": 6164, "jackets</w>": 14745, "jacki": 47418, "jackie": 28023, "jackie</w>": 11716, "jacking</w>": 40929, "jackman</w>": 35723, "jackpot</w>": 23926, "jacks</w>": 19649, 
"jackson": 12321, "jackson</w>": 4363, "jacksonville</w>": 19263, "jaco": 6840, "jacob": 14385, "jacob</w>": 9222, "jacobs</w>": 17482, "jacobson</w>": 46826, "jacqu": 14495, "jacqueline</w>": 22843, "jacques</w>": 17799, "jad": 12976, "jad</w>": 38691, "jada</w>": 37416, "jade": 25123, "jade</w>": 14513, "jaden</w>": 37174, "jadine</w>": 37445, "jae": 16869, "jae</w>": 15765, "jaejoong</w>": 43610, "jaf": 19362, "jag": 7984, "jag</w>": 36236, "jagan</w>": 48530, "jagger</w>": 30835, "jags</w>": 31086, "jagu": 10096, "jaguar": 44777, "jaguar</w>": 14757, "jaguars</w>": 21854, "jah": 20067, "jah</w>": 11084, "jahan": 44404, "jahan</w>": 47827, "jai": 10542, "jai</w>": 13819, "jail": 18574, "jail</w>": 9332, "jailbreak</w>": 45990, "jailed</w>": 19456, "jails</w>": 47833, "jaime</w>": 24716, "jain</w>": 21999, "jaipur</w>": 23593, "jais": 48607, "jait": 28910, "jaitley</w>": 32776, "jak": 9225, "jak</w>": 30589, "jakarta</w>": 15471, "jake": 13140, "jake</w>": 7419, "jakob</w>": 47358, "jal": 8380, "jal</w>": 26773, "jalan</w>": 27270, "jalap": 49081, "jalape": 34263, "jalapeño</w>": 43017, "jalen</w>": 33548, "jam": 1434, "jam</w>": 5201, "jama": 8977, "jama</w>": 35366, "jamaica</w>": 13019, "jamaican</w>": 25144, "jamal</w>": 26108, "jambo": 35599, "jamboree</w>": 38506, "jame": 12341, "james": 6963, "james</w>": 2392, "jamesbond</w>": 44704, "jamesc": 47004, "jameson</w>": 31731, "jami": 15092, "jamie": 16454, "jamie</w>": 8078, "jamiedor": 34310, "jamiedornan</w>": 34896, "jammed</w>": 35590, "jammin</w>": 35223, "jamming</w>": 25862, "jammu</w>": 25926, "jams</w>": 20243, "jan": 1891, "jan</w>": 3334, "jana</w>": 18182, "jane": 12389, "jane</w>": 6736, "janeiro</w>": 31740, "janet": 29665, "janet</w>": 15872, "jang": 41526, "jang</w>": 22074, "jani</w>": 22606, "janice</w>": 36048, "janine</w>": 46896, "janis</w>": 44233, "jann": 35377, "jans</w>": 22578, "jansen</w>": 45354, "janu": 3623, "january</w>": 3697, "jap": 2299, "jap</w>": 49062, "japan": 4502, "japan</w>": 3400, "japanese": 27211, "japanese</w>": 4925, "japs</w>": 42121, "jar": 5120, "jar</w>": 10837, "jard": 25778, "jardin</w>": 37371, "jare": 17654, "jared": 35597, "jared</w>": 12571, "jaredle": 36739, "jaredleto</w>": 37106, "jaro</w>": 35505, "jarpad</w>": 44497, "jarre": 23385, "jarrett</w>": 30531, "jars</w>": 27583, "jarvis</w>": 29286, "jas": 4492, "jas</w>": 17559, "jasmin": 42989, "jasmin</w>": 47700, "jasmine</w>": 17056, "jason": 10009, "jason</w>": 5395, "jasper</w>": 19827, "jat": 26106, "jau": 26932, "jauregui</w>": 48175, "jav": 6234, "java</w>": 12918, "javascri": 16289, "javascript</w>": 16423, "jave": 46218, "javed</w>": 42268, "javelin</w>": 41701, "javi</w>": 47627, "javier</w>": 23307, "jaw": 14804, "jaw</w>": 17307, "jawa": 44790, "jaws</w>": 25491, "jax": 22348, "jax</w>": 12390, "jay": 3427, "jay</w>": 4155, "jaya</w>": 21960, "jayanti</w>": 37732, "jaye</w>": 45703, "jayne</w>": 35228, "jays</w>": 12393, "jaz": 3465, "jaz</w>": 32874, "jazeera</w>": 38260, "jazz": 11488, "jazz</w>": 4528, "jazzfest</w>": 36683, "jazzy</w>": 28191, "jb": 21915, "jb</w>": 13637, "jc": 14991, "jc</w>": 11517, "jd": 18289, "jd</w>": 14125, "jdm</w>": 42013, "je": 1013, "je</w>": 8776, "jeal": 9964, "jealous</w>": 11093, "jealousy</w>": 37654, "jean": 13943, "jean</w>": 6473, "jeanette</w>": 48167, "jeanne</w>": 29201, "jeans</w>": 10157, "jeb</w>": 35101, "jec": 1347, "ject</w>": 6070, "jed": 12166, "jed</w>": 38748, "jeddah</w>": 40982, "jedi</w>": 16681, "jee": 29250, "jee</w>": 14870, "jeep": 16593, "jeep</w>": 
11286, "jeeplife</w>": 43100, "jeet": 45542, "jeet</w>": 30944, "jef": 10276, "jeff": 6245, "jeff</w>": 5550, "jefferson": 44711, "jefferson</w>": 13976, "jeffery</w>": 41470, "jeffree": 45994, "jeffrey": 32886, "jeffrey</w>": 16027, "jeho": 42437, "jeky": 43893, "jekyll</w>": 49405, "jel": 9794, "jelena</w>": 48218, "jelly": 19110, "jelly</w>": 13762, "jellyfish</w>": 30988, "jem": 46326, "jem</w>": 37530, "jen": 2554, "jen</w>": 12997, "jenkins</w>": 16162, "jenn": 33921, "jenn</w>": 29869, "jenna</w>": 17125, "jenner</w>": 14260, "jenni": 6774, "jennie</w>": 28875, "jennifer": 19786, "jennifer</w>": 8613, "jennings</w>": 21564, "jenny": 20165, "jenny</w>": 13414, "jens</w>": 40806, "jensen": 35558, "jensen</w>": 19004, "jensenackles</w>": 41011, "jeon": 45200, "jeon</w>": 43337, "jeong": 47146, "jeong</w>": 39264, "jeopar": 22988, "jeopardy</w>": 29613, "jer": 2310, "jer</w>": 35307, "jere": 5614, "jeremi": 22362, "jeremiah</w>": 27301, "jeremy": 14656, "jeremy</w>": 8127, "jeremycorbyn</w>": 37484, "jeric": 25084, "jericho</w>": 28892, "jerk</w>": 23917, "jerky</w>": 40079, "jermaine</w>": 40722, "jerome</w>": 19876, "jerry": 18163, "jerry</w>": 9164, "jersey": 21921, "jersey</w>": 4471, "jerseys</w>": 15518, "jerus": 12257, "jerusalem</w>": 12557, "jes": 7686, "jes</w>": 35826, "jess": 5313, "jess</w>": 13758, "jesse": 23112, "jesse</w>": 11770, "jessi": 24373, "jessic": 14881, "jessica": 45421, "jessica</w>": 8178, "jessie</w>": 19424, "jester</w>": 44225, "jesu": 19777, "jesuit</w>": 33234, "jesus</w>": 4070, "jet": 11515, "jet</w>": 6565, "jetblue</w>": 45021, "jeter</w>": 38450, "jets": 38584, "jets</w>": 10025, "jett</w>": 44541, "jetty</w>": 46382, "jew</w>": 27450, "jewel": 4880, "jewel</w>": 17591, "jewell": 9777, "jewellers</w>": 46265, "jewellery</w>": 11192, "jewelry": 28018, "jewelry</w>": 6039, "jewels</w>": 20205, "jewish": 29594, "jewish</w>": 9104, "jews</w>": 14200, "jf": 31130, "jf</w>": 33718, "jfc</w>": 43652, "jfk</w>": 18486, "jg": 41986, "jg</w>": 35138, "jh": 24858, "jh</w>": 21485, "jha": 47012, "jha</w>": 38092, "jhal": 45695, "jhar": 31546, "jharkhand</w>": 39001, "jhb</w>": 34631, "ji": 3252, "ji</w>": 2697, "jia</w>": 32907, "jian</w>": 33427, "jiang": 43309, "jiang</w>": 25762, "jic": 48350, "jic</w>": 40215, "jid</w>": 24403, "jie</w>": 40005, "jig": 15136, "jig</w>": 47430, "jigsaw</w>": 32987, "jiha": 23194, "jihad</w>": 29637, "jihoon</w>": 44765, "jil": 36225, "jill": 24136, "jill</w>": 15254, "jillian</w>": 37820, "jim": 3190, "jim</w>": 4550, "jima</w>": 20679, "jimcantore</w>": 43950, "jimenez</w>": 35947, "jimi</w>": 30565, "jimin</w>": 16286, "jimmie</w>": 45679, "jimmy": 12215, "jimmy</w>": 6817, "jimmyfallon</w>": 45265, "jin": 7927, "jin</w>": 8485, "jind": 40609, "jing": 34933, "jing</w>": 28607, "jingle</w>": 28699, "jinnah</w>": 43141, "jinping</w>": 39308, "jinx</w>": 42977, "jinyoung</w>": 38051, "jio</w>": 40501, "jis": 25988, "jis</w>": 23515, "jisoo</w>": 43070, "jit": 11947, "jit</w>": 20308, "jitsu</w>": 24530, "jiu": 43351, "jiu</w>": 44123, "jj": 12502, "jj</w>": 12790, "jk": 20189, "jk</w>": 9702, "jkt</w>": 21494, "jl": 25027, "jl</w>": 22911, "jlo</w>": 31017, "jm": 24044, "jm</w>": 18657, "jn": 24576, "jn</w>": 21717, "jnr</w>": 37145, "jnu</w>": 47142, "jo": 683, "jo</w>": 3804, "joachim</w>": 48979, "joan": 28064, "joan</w>": 12710, "joann": 35484, "joanna</w>": 25357, "joanne": 43736, "joanne</w>": 25092, "joao</w>": 45666, "joaqu": 25140, "joaquin</w>": 30745, "job": 13114, "job</w>": 2075, "jobs</w>": 3735, 
"jobsearch</w>": 45459, "joburg</w>": 39343, "jocel": 36879, "jocelyn</w>": 47259, "jock</w>": 34485, "jockey</w>": 20126, "jodh": 48689, "jodi": 36812, "jodi</w>": 26888, "jodie</w>": 33100, "jody</w>": 32959, "joe": 9309, "joe</w>": 3305, "joel": 19819, "joel</w>": 11429, "joes</w>": 34756, "joey": 16281, "joey</w>": 10455, "jog": 37967, "jog</w>": 31691, "jogging</w>": 37922, "joh": 1201, "johan": 17416, "johan</w>": 27789, "johann</w>": 31180, "johanna</w>": 41494, "johannes</w>": 37779, "johannesburg</w>": 28377, "johansson</w>": 41512, "johar</w>": 34871, "john": 2004, "john</w>": 1742, "johncena</w>": 46820, "johnnie</w>": 47947, "johnny": 14464, "johnny</w>": 6904, "johns</w>": 14515, "johnson": 26036, "johnson</w>": 4010, "johnston</w>": 19791, "johnstone</w>": 40766, "johor</w>": 34750, "join": 14737, "join</w>": 1384, "joined</w>": 4954, "joining</w>": 5118, "joins</w>": 5681, "joint</w>": 6640, "jointhe": 30422, "jointly</w>": 37471, "joints</w>": 27204, "jojo": 41484, "jojo</w>": 22075, "joke</w>": 7198, "joker</w>": 18200, "jokers</w>": 44101, "jokes</w>": 11336, "joking</w>": 26112, "joko": 44975, "jol": 9174, "jol</w>": 36470, "jolie</w>": 31633, "jolla</w>": 46109, "jolly</w>": 21516, "jom": 32152, "jon": 3026, "jon</w>": 6139, "jona": 6629, "jonah": 47934, "jonah</w>": 27556, "jonas": 42373, "jonas</w>": 13650, "jonathan": 19026, "jonathan</w>": 7762, "jone": 33934, "jones": 19091, "jones</w>": 3538, "jong": 20214, "jong</w>": 14726, "jonghyun</w>": 29023, "jongin</w>": 36957, "joni</w>": 43177, "jonny": 28454, "jonny</w>": 21895, "joo": 25807, "joo</w>": 27680, "joom": 47543, "joon</w>": 18547, "joong</w>": 26544, "jop": 30486, "joplin</w>": 42688, "jor": 2482, "jor</w>": 31595, "jordan": 14644, "jordan</w>": 4388, "jordani": 46898, "jordi</w>": 44795, "jorge": 48761, "jorge</w>": 18225, "jos": 20560, "jos</w>": 19661, "jose": 4647, "jose</w>": 7075, "josef</w>": 36584, "josel": 47800, "joseph": 14163, "joseph</w>": 6478, "josephine</w>": 34866, "josh": 9998, "josh</w>": 5679, "joshi</w>": 24786, "joshu": 9112, "joshua</w>": 11852, "josi": 33583, "josie</w>": 33167, "joss</w>": 42834, "josé</w>": 27922, "jou": 19921, "jou</w>": 32029, "jour": 2078, "jour</w>": 17142, "journ": 4563, "journal</w>": 6626, "journalism</w>": 10123, "journalist</w>": 9914, "journalists</w>": 12249, "journals</w>": 24391, "journe": 48833, "journey": 32156, "journey</w>": 3749, "journeys</w>": 23329, "journo</w>": 37034, "journos</w>": 46437, "jovi</w>": 33866, "joy": 6308, "joy</w>": 4273, "joyce": 43753, "joyce</w>": 15275, "joye": 34052, "joyeux</w>": 41876, "joyful</w>": 24139, "joyous</w>": 32245, "joyride</w>": 46949, "joys</w>": 22996, "jp": 18249, "jp</w>": 10557, "jpg</w>": 36950, "jpn</w>": 36212, "jr": 13973, "jr</w>": 3605, "js": 46243, "js</w>": 8006, "jst</w>": 26523, "jt": 39480, "jt</w>": 18119, "ju": 669, "ju</w>": 9970, "jual</w>": 38720, "juan": 17148, "juan</w>": 9274, "juana</w>": 9081, "jubi": 15485, "jubil": 47743, "jubilee</w>": 16907, "juco</w>": 31570, "jud": 8363, "juda": 32478, "judah</w>": 41066, "judaism</w>": 42217, "judas</w>": 39532, "judd</w>": 29770, "judg": 20012, "judge": 16824, "judge</w>": 5656, "judged</w>": 33453, "judgement</w>": 25246, "judges</w>": 12575, "judging</w>": 16570, "judgment</w>": 24191, "judi": 42546, "judice</w>": 28032, "judicial</w>": 19579, "judiciary</w>": 24545, "judith</w>": 24047, "judo</w>": 27011, "judy": 34663, "judy</w>": 16510, "jug</w>": 27619, "jugg": 38628, "juic": 38761, "juice": 37954, "juice</w>": 6916, "juices</w>": 
36757, "juicy</w>": 17623, "juju</w>": 43020, "juke": 32519, "jukebox</w>": 36411, "jul": 34662, "jul</w>": 15975, "jule": 40819, "jules</w>": 21996, "juli": 3614, "juli</w>": 49160, "julia</w>": 10207, "julian": 25459, "julian</w>": 12643, "juliana</w>": 46059, "julie": 22534, "julie</w>": 10505, "julien</w>": 32595, "juliet</w>": 20641, "juliette</w>": 44804, "julio</w>": 24888, "julius</w>": 20870, "july</w>": 2272, "jum": 20791, "jumbo</w>": 24678, "jume": 45989, "jump": 5519, "jump</w>": 6423, "jumped</w>": 16901, "jumper</w>": 16558, "jumpers</w>": 36485, "jumping</w>": 11476, "jumpman</w>": 48803, "jumps</w>": 18911, "jumpsuit</w>": 31044, "jun": 1637, "jun</w>": 7719, "junction</w>": 11320, "june": 23188, "june</w>": 2345, "jung": 13086, "jung</w>": 13031, "jungkook</w>": 20040, "jungle": 42421, "jungle</w>": 10865, "juni": 4029, "junior": 21167, "junior</w>": 5027, "juniors</w>": 16811, "juniper</w>": 33829, "junk</w>": 16000, "junkie</w>": 27613, "junkies</w>": 41207, "juno</w>": 28845, "junto</w>": 34282, "jupit": 15270, "jupiter</w>": 16212, "jur": 15896, "jura": 14715, "jurassic": 28844, "jurassic</w>": 21255, "jurgen</w>": 39263, "juris": 37010, "jurisdic": 37714, "jury</w>": 12931, "jus</w>": 14999, "just": 1770, "just</w>": 761, "justi": 14700, "justic": 30399, "justice": 16904, "justice</w>": 3604, "justicefor": 25812, "justiceleague</w>": 41929, "justices</w>": 44356, "justified</w>": 34546, "justify</w>": 28192, "justin": 7537, "justin</w>": 4394, "justinbieber</w>": 12501, "justine</w>": 34418, "justintrudeau</w>": 32184, "justsaying</w>": 42922, "juve": 47717, "juve</w>": 23092, "juven": 12944, "juvenile</w>": 19333, "juvent": 13908, "juventus": 47378, "juventus</w>": 16208, "jux": 33552, "juxta": 34964, "jv": 37932, "jv</w>": 11805, "jw": 30221, "jw</w>": 24215, "jy": 20979, "jyo": 27378, "jyoti</w>": 48696, "jä": 45381, "k": 74, "k</w>": 330, "ka": 1595, "ka</w>": 1525, "kaa</w>": 34496, "kab": 6554, "kab</w>": 45134, "kabaddi</w>": 41749, "kabir</w>": 38619, "kabo": 47974, "kabul</w>": 26160, "kac": 21693, "kach": 14341, "kad": 10901, "kade</w>": 41130, "kaduna</w>": 38053, "kae": 22542, "kaeper": 30070, "kaepernick</w>": 30713, "kaf": 19870, "kag": 13666, "kag</w>": 31003, "kah": 16068, "kah</w>": 15463, "kahn</w>": 35397, "kai": 12752, "kai</w>": 9601, "kaido</w>": 40255, "kail": 23623, "kaine</w>": 39028, "kair": 33027, "kaiser": 43685, "kaiser</w>": 29960, "kait": 19326, "kaitlyn</w>": 34948, "kaj": 44788, "kaj</w>": 40381, "kak": 10401, "kak</w>": 40128, "kaka</w>": 47689, "kaku</w>": 30900, "kal": 4187, "kal</w>": 18712, "kala": 45453, "kala</w>": 33105, "kalam</w>": 40142, "kalamaz": 42328, "kalamazoo</w>": 46264, "kalb</w>": 34483, "kale": 17162, "kale</w>": 16625, "kaleido": 41144, "kali": 17844, "kali</w>": 26964, "kalin": 42776, "kalyan</w>": 23825, "kam": 4104, "kam</w>": 26011, "kamal": 31371, "kamal</w>": 28619, "kamala</w>": 45003, "kame": 45235, "kamen</w>": 40738, "kami</w>": 28707, "kamloops</w>": 36602, "kamp": 35179, "kamp</w>": 29522, "kampala</w>": 37134, "kan": 2532, "kan</w>": 8101, "kana</w>": 35178, "kand": 17478, "kane": 32218, "kane</w>": 9765, "kang": 12226, "kang</w>": 20789, "kangar": 20622, "kangaroo</w>": 25513, "kani": 40907, "kani</w>": 41948, "kann": 18533, "kannada</w>": 30053, "kano</w>": 28201, "kans</w>": 34012, "kansas": 25507, "kansas</w>": 6539, "kansascity</w>": 46134, "kant": 39923, "kant</w>": 47132, "kanth</w>": 24427, "kanu</w>": 44565, "kany": 13590, "kanye": 29680, "kanye</w>": 14965, "kanyewest</w>": 31943, "kap": 
6804, "kap</w>": 45279, "kapam": 48561, "kapil": 32337, "kapil</w>": 42709, "kapilshar": 48978, "kaplan</w>": 37401, "kapoor</w>": 9117, "kapp": 36717, "kappa</w>": 20239, "kapur</w>": 42371, "kar": 1813, "kar</w>": 5933, "kara</w>": 12552, "karab": 40916, "karachi</w>": 13671, "karak": 40372, "karan": 20077, "karan</w>": 20931, "karanjohar</w>": 47621, "karao": 16262, "karaoke</w>": 16640, "karate</w>": 21211, "kardashi": 13619, "kardashian</w>": 14578, "kare": 14310, "kare</w>": 38354, "kareem</w>": 38885, "kareena</w>": 41569, "karen": 17719, "karen</w>": 10349, "kari": 15339, "kari</w>": 15161, "karim</w>": 33477, "karin</w>": 43917, "karina</w>": 40250, "karl": 20967, "karl</w>": 13134, "karla</w>": 42309, "karma</w>": 17658, "karnat": 13994, "karnataka</w>": 15515, "karo</w>": 45305, "kart": 47841, "kart</w>": 21310, "karthik</w>": 41397, "karti": 23053, "kartikeyan</w>": 32584, "karting</w>": 41655, "kas": 6119, "kas</w>": 14372, "kasa</w>": 46111, "kash": 6954, "kash</w>": 21371, "kashi": 47945, "kashmir": 20251, "kashmir</w>": 10783, "kashmiri</w>": 35331, "kasi": 45870, "kasi</w>": 32819, "kasich</w>": 39666, "kat": 2844, "kat</w>": 9341, "kata</w>": 14558, "kate": 11620, "kate</w>": 6699, "katelyn</w>": 45963, "kath": 7386, "kath</w>": 19745, "katharine</w>": 41473, "katherine</w>": 17687, "kathle": 18721, "kathleen</w>": 21709, "kathmandu</w>": 34456, "kathniel</w>": 36159, "kathr": 14905, "kathryn": 33142, "kathryn</w>": 19999, "kathy": 34775, "kathy</w>": 18795, "kati": 6515, "kati</w>": 29928, "katic</w>": 48058, "katie": 24117, "katie</w>": 9076, "katniss</w>": 47916, "kato</w>": 27573, "katrin": 31282, "katrina</w>": 21397, "katrinakaif</w>": 45845, "kats</w>": 44213, "katsu": 49296, "katsu</w>": 43712, "katy": 17609, "katy</w>": 14435, "katyperry</w>": 28309, "katz</w>": 30790, "kau": 9299, "kau</w>": 36895, "kauai</w>": 44050, "kaufman</w>": 37188, "kaur</w>": 30518, "kav": 10228, "kavan": 18576, "kavanaugh</w>": 20252, "kaw": 10842, "kaw</w>": 42719, "kawa</w>": 33244, "kawaii</w>": 26891, "kawasaki</w>": 28227, "kawhi</w>": 41220, "kay": 4673, "kay</w>": 9862, "kaya</w>": 22752, "kayak</w>": 27043, "kayaking</w>": 28977, "kaye</w>": 33003, "kayla</w>": 17139, "kaylee</w>": 47215, "kayo</w>": 37021, "kaz": 8812, "kaz</w>": 39622, "kazakh": 25451, "kazakhstan</w>": 26720, "kazan</w>": 47641, "kb": 27381, "kb</w>": 19960, "kbs</w>": 27418, "kc": 10869, "kc</w>": 8638, "kca</w>": 14347, "kcon</w>": 39970, "kcr": 46181, "kd": 21826, "kd</w>": 15597, "kday</w>": 31074, "kdrama</w>": 48628, "ke": 643, "ke</w>": 618, "kea</w>": 47926, "kean": 43288, "keane</w>": 28635, "keanu</w>": 40608, "kear": 21562, "kearney</w>": 36435, "keating</w>": 40045, "keaton</w>": 29975, "kebab</w>": 36497, "ked": 11730, "ked</w>": 1243, "kee": 9724, "kee</w>": 6760, "keef</w>": 42323, "keefe</w>": 46965, "keegan</w>": 31122, "keel</w>": 48376, "keen": 17714, "keen</w>": 13218, "keenan</w>": 36276, "keep": 2924, "keep</w>": 1726, "keeper</w>": 7650, "keepers</w>": 16130, "keepin</w>": 41712, "keeping": 38371, "keeping</w>": 4873, "keepit": 28044, "keeps</w>": 6333, "keer": 27412, "keerth": 47500, "keerthyofficial</w>": 48185, "kees</w>": 10791, "keg</w>": 32785, "keh": 41272, "keh</w>": 36983, "kei": 18735, "kei</w>": 24835, "keith": 18762, "keith</w>": 8252, "kej": 15674, "kejri": 16617, "kejriwal</w>": 17334, "keke</w>": 39195, "kel": 2825, "kel</w>": 7553, "kele": 41765, "kell": 16082, "kell</w>": 40103, "keller</w>": 21407, "kelley</w>": 23776, "kelli": 45852, "kelli</w>": 46190, "kellie</w>": 
49224, "kellogg</w>": 44218, "kelly": 13417, "kelly</w>": 5220, "kelown": 31708, "kelowna</w>": 32963, "kelsey": 42295, "kelsey</w>": 23018, "kelvin</w>": 32859, "kem": 31013, "kem</w>": 17349, "kemp": 18302, "kemp</w>": 25325, "ken": 1838, "ken</w>": 1702, "kend": 7497, "kendal</w>": 44836, "kendall": 34607, "kendall</w>": 16238, "kendra</w>": 36074, "kendrick": 41787, "kendrick</w>": 21953, "kendricklamar</w>": 47020, "kenne": 6209, "kennedy": 38631, "kennedy</w>": 9004, "kennel</w>": 39595, "kenneth": 46900, "kenneth</w>": 17839, "kenney</w>": 41373, "kenny": 20185, "kenny</w>": 9595, "kens</w>": 29765, "kensing": 21505, "kensington</w>": 24988, "kent": 13875, "kent</w>": 8214, "kentu": 9045, "kentucky": 32230, "kentucky</w>": 10014, "keny": 17374, "kenya</w>": 6181, "kenyan</w>": 22624, "kenyans</w>": 36263, "kenyatta</w>": 31012, "kenzie</w>": 38087, "keo": 43062, "kept</w>": 7737, "ker": 2352, "ker</w>": 1485, "keral": 35122, "kerala</w>": 11881, "kered</w>": 26690, "kerel</w>": 32232, "keri</w>": 43447, "kermit</w>": 40908, "kern</w>": 40150, "kernel</w>": 40684, "kerr</w>": 20491, "kerri": 41849, "kerry": 24795, "kerry</w>": 13097, "kers": 30347, "kers</w>": 2880, "kershaw</w>": 40785, "kerson</w>": 42810, "kerswednesday</w>": 48152, "kert</w>": 47279, "kes": 38398, "kes</w>": 1115, "kesh</w>": 19751, "kesha</w>": 36526, "kest</w>": 15080, "ket": 2715, "ket</w>": 1236, "ketball</w>": 38240, "ketch": 22590, "ketch</w>": 35371, "ketchup</w>": 26724, "kete": 25404, "keted</w>": 41396, "keting</w>": 15951, "keto": 27485, "keto</w>": 28754, "kets</w>": 1632, "kett": 23124, "kett</w>": 10312, "kettering</w>": 43779, "kettle": 41992, "kettle</w>": 24303, "kev": 22758, "kev</w>": 29419, "kevin": 9419, "kevin</w>": 4685, "kew": 38014, "kew</w>": 31409, "kex": 30251, "key": 2891, "key</w>": 1458, "keyan</w>": 27617, "keyboard</w>": 13017, "keyboards</w>": 49237, "keychain</w>": 31050, "keye": 40516, "keye</w>": 20635, "keyes</w>": 18336, "keynes</w>": 32462, "keynote</w>": 7556, "keys": 48912, "keys</w>": 6355, "keystone</w>": 30688, "keyword</w>": 42284, "keywords</w>": 48122, "kf": 33308, "kf</w>": 42119, "kfc</w>": 22032, "kg": 36772, "kg</w>": 7817, "kgs</w>": 46629, "kh": 2166, "kh</w>": 7452, "kha": 7333, "kha</w>": 18929, "khair": 43742, "khaki</w>": 41646, "khal": 13070, "khaled</w>": 29343, "khali": 11324, "khalid</w>": 27166, "khalifa</w>": 21389, "khalil</w>": 36229, "kham": 24892, "khan": 13318, "khan</w>": 3873, "khand": 43384, "khand</w>": 31110, "khanna</w>": 29931, "khar": 18340, "khar</w>": 28578, "khart</w>": 37458, "khat": 43290, "khe": 26360, "kher</w>": 43843, "khi": 39062, "khi</w>": 42925, "khil": 34101, "khloe</w>": 45312, "kho": 14022, "kho</w>": 28774, "khou</w>": 30656, "khs</w>": 21239, "khtar</w>": 45593, "khu": 14041, "khur": 32083, "khy": 40917, "khz</w>": 45604, "ki": 848, "ki</w>": 2608, "kia</w>": 8712, "kian": 43961, "kian</w>": 25708, "kians</w>": 44010, "kib": 43108, "kiba": 37207, "kic": 24003, "kic</w>": 27633, "kicchasu": 44665, "kicchasudeep</w>": 45560, "kick": 4102, "kick</w>": 4289, "kickass</w>": 39299, "kickboxing</w>": 36041, "kicked</w>": 12479, "kicker</w>": 26338, "kickin</w>": 34597, "kicking</w>": 7802, "kickoff</w>": 10245, "kicks</w>": 6989, "kickstart</w>": 40780, "kickstarter</w>": 13228, "kid": 3948, "kid</w>": 3551, "kidd</w>": 24082, "kidding</w>": 14535, "kiddo</w>": 36360, "kiddos</w>": 29205, "kidlit": 39064, "kidlit</w>": 33515, "kidlitart</w>": 41600, "kidman</w>": 44931, "kidnap</w>": 45100, "kidnapp": 16183, "kidnapped</w>": 
24737, "kidnapping</w>": 32361, "kidney": 37835, "kidney</w>": 14610, "kids": 15561, "kids</w>": 1911, "kidz</w>": 41938, "kie": 8544, "kie</w>": 3094, "kiefer</w>": 48026, "kiel": 40940, "kiel</w>": 25509, "kien</w>": 28782, "kier": 20403, "kier</w>": 35575, "kieran</w>": 29231, "kies": 36601, "kies</w>": 4993, "kiest</w>": 29755, "kiev</w>": 24585, "kiewicz</w>": 47574, "kigali</w>": 40278, "kii</w>": 39340, "kik</w>": 36176, "kiki</w>": 23962, "kiko</w>": 40861, "kil": 4912, "kil</w>": 39337, "kildare</w>": 45541, "kili": 24386, "kilig</w>": 49172, "kilimanjaro</w>": 43470, "kilkenny</w>": 33805, "kill": 6163, "kill</w>": 4367, "killa</w>": 41355, "killarney</w>": 48813, "killed</w>": 3733, "killer": 28230, "killer</w>": 6613, "killers</w>": 17614, "killin</w>": 25903, "killing": 37977, "killing</w>": 5923, "killings</w>": 24918, "kills</w>": 9795, "kiln</w>": 44150, "kilo": 39281, "kilom": 26285, "kilometers</w>": 39192, "kilometres</w>": 43278, "kilt</w>": 49319, "kim": 4639, "kim</w>": 4606, "kimber": 16796, "kimberley</w>": 39859, "kimberly</w>": 27465, "kimchi</w>": 41027, "kimi</w>": 31536, "kimkardashian</w>": 35400, "kimmel</w>": 27820, "kimono</w>": 40024, "kin": 1442, "kin</w>": 2667, "kina</w>": 28518, "kind": 7204, "kind</w>": 3044, "kinda</w>": 6612, "kinder": 12711, "kinder</w>": 24159, "kindergarten</w>": 16749, "kindle": 24704, "kindle</w>": 10746, "kindleunlimited</w>": 32164, "kindly</w>": 13952, "kindness": 45112, "kindness</w>": 10614, "kinds</w>": 14879, "kine": 17607, "kineni</w>": 49080, "kinetic</w>": 37699, "king": 2365, "king</w>": 674, "kingdom": 21870, "kingdom</w>": 7364, "kingdomhearts</w>": 48570, "kingdoms</w>": 43890, "kingfisher</w>": 34330, "kingjames</w>": 33153, "kingly</w>": 33642, "kingof": 27878, "kings": 18590, "kings</w>": 4232, "kingsley</w>": 41807, "kingston": 40736, "kingston</w>": 15393, "kini</w>": 41644, "kinky</w>": 37006, "kinney</w>": 37233, "kino</w>": 39000, "kins": 31060, "kins</w>": 4386, "kinson</w>": 12095, "kio": 28210, "kio</w>": 39401, "kiosk</w>": 39146, "kip": 27636, "kip</w>": 15986, "kipp": 43329, "kir": 3476, "kir</w>": 32949, "kira</w>": 33038, "kiran": 43234, "kiran</w>": 36603, "kirby</w>": 17065, "kiri": 34170, "kiri</w>": 45826, "kirk": 10639, "kirk</w>": 11508, "kirkland</w>": 43061, "kiro</w>": 39749, "kirstel</w>": 46483, "kirsten</w>": 31813, "kirsty</w>": 37787, "kis": 3199, "kis</w>": 22796, "kish": 25662, "kiss": 43757, "kiss</w>": 5946, "kissed</w>": 22561, "kisses": 47876, "kisses</w>": 11220, "kissing</w>": 18637, "kistan</w>": 29580, "kit": 4566, "kit</w>": 4274, "kita</w>": 29961, "kitch": 3850, "kitchen": 18131, "kitchen</w>": 4485, "kitchener</w>": 34428, "kitchens</w>": 28301, "kite": 47777, "kite</w>": 19867, "kites</w>": 45829, "kits</w>": 13730, "kitt": 10840, "kitten</w>": 13063, "kittens</w>": 17216, "kitties</w>": 36013, "kitty": 25067, "kitty</w>": 8417, "kiwan": 38709, "kiwanis</w>": 46513, "kiwi</w>": 22440, "kiwis</w>": 48108, "kiya</w>": 41610, "kj": 27385, "kj</w>": 28238, "kja": 41048, "kjv</w>": 37387, "kk": 4390, "kk</w>": 10849, "kka</w>": 19002, "kke</w>": 44239, "kker</w>": 32399, "kki</w>": 44672, "kkk</w>": 20073, "kkkk": 15834, "kkkk</w>": 47160, "kkkkkkkk": 31042, "kko</w>": 43965, "kkr</w>": 40855, "kl": 8498, "kl</w>": 14134, "kla": 11249, "klan</w>": 46935, "klar": 41374, "klaus</w>": 31788, "kle": 7612, "kle</w>": 7432, "klein": 33475, "klein</w>": 17579, "kley</w>": 18594, "kli": 31640, "klin": 44809, "klin</w>": 41647, "kline</w>": 47580, "kling": 40270, "klm</w>": 38859, 
"klo": 15296, "klopp</w>": 26446, "kltu</w>": 25978, "klu": 21852, "kly</w>": 45090, "km": 29954, "km</w>": 4590, "kman</w>": 33312, "kms</w>": 24996, "kn": 4825, "kn</w>": 23693, "knapp</w>": 33945, "kne": 6358, "knee</w>": 9897, "knees</w>": 19115, "kner</w>": 31578, "knew</w>": 5009, "kni": 6312, "knick": 33286, "knicks</w>": 17657, "knife": 44176, "knife</w>": 8960, "knigh": 43099, "knight": 17949, "knight</w>": 7355, "knights</w>": 10385, "knit": 18745, "knit</w>": 14313, "knitted</w>": 28151, "knitting</w>": 18863, "knives</w>": 20910, "kno": 1482, "kno</w>": 25362, "knob</w>": 29736, "knobs</w>": 47504, "knock": 14195, "knock</w>": 11583, "knocked</w>": 15325, "knocking</w>": 20380, "knockout</w>": 22602, "knocks</w>": 24296, "knoll</w>": 43882, "knot</w>": 18412, "knots</w>": 32428, "know": 4179, "know</w>": 1038, "knowing</w>": 9267, "knowledge": 27864, "knowledge</w>": 5510, "knowledgeable</w>": 43391, "knowles</w>": 32631, "known</w>": 3102, "knows</w>": 4309, "knowyour": 30773, "knox": 18630, "knox</w>": 21833, "knoxville</w>": 23232, "knu": 14812, "knuck": 21333, "knuckle</w>": 42023, "knuckles</w>": 40127, "knw</w>": 40803, "ko": 1313, "ko</w>": 2448, "koala</w>": 36654, "kobe": 42644, "kobe</w>": 14470, "kobo</w>": 42390, "koch</w>": 25331, "kochi</w>": 36710, "kodak</w>": 30425, "kodi": 46611, "kof": 17528, "koff</w>": 47303, "kofi</w>": 40400, "koh": 13379, "koh</w>": 31216, "kohl</w>": 48479, "kohli</w>": 17549, "koi</w>": 28150, "kojima</w>": 46419, "kok": 32045, "kok</w>": 11225, "koko": 42426, "koko</w>": 40003, "kol": 7142, "kol</w>": 31023, "kolkata</w>": 18011, "kom": 6686, "kom</w>": 24181, "kombat</w>": 29670, "kombucha</w>": 48615, "komo</w>": 31820, "kon": 5743, "kon</w>": 29519, "kona</w>": 30203, "kong": 31784, "kong</w>": 6506, "konstant": 46583, "koo": 12225, "koo</w>": 40472, "kook</w>": 16003, "kool": 36755, "kool</w>": 26444, "kop": 16623, "kop</w>": 38999, "kor": 6428, "kor</w>": 24175, "kore": 3919, "korea</w>": 5915, "korean": 31949, "korean</w>": 8034, "kori</w>": 42842, "korn": 45412, "korn</w>": 31492, "kors</w>": 34535, "kos": 47438, "kos</w>": 22951, "kosh</w>": 45233, "kosher</w>": 36502, "koso": 23892, "kosovo</w>": 28343, "kot": 23323, "kot</w>": 20701, "kota</w>": 21735, "koto": 40945, "koto</w>": 29977, "kou": 18502, "kou</w>": 39614, "kour": 34134, "kov": 17733, "kov</w>": 15156, "kova</w>": 26185, "koval": 47903, "kovic</w>": 16886, "kovich</w>": 44794, "kovsky</w>": 33384, "kow": 29764, "kow</w>": 23919, "kowski</w>": 17649, "koz": 29598, "kp": 16174, "kp</w>": 16894, "kpa</w>": 38759, "kph</w>": 41138, "kpk</w>": 42094, "kpmg</w>": 38243, "kpop": 29534, "kpop</w>": 15859, "kprc</w>": 47832, "kprs</w>": 46253, "kr": 7309, "kr</w>": 14107, "kra": 5762, "kraft</w>": 28057, "kraja</w>": 29016, "kraken</w>": 48408, "krakow</w>": 40033, "kram": 19075, "kramer</w>": 27495, "kran": 33243, "kranti</w>": 47969, "krat": 30470, "kre": 8362, "kreme</w>": 43140, "kremlin</w>": 33979, "kri": 3679, "kris": 35251, "kris</w>": 12261, "krish": 11487, "krishna</w>": 15863, "krishnan</w>": 46535, "krispy</w>": 49292, "krist": 16490, "kristen": 28881, "kristen</w>": 16644, "kristi": 26895, "kristin": 35408, "kristin</w>": 26785, "kristina</w>": 33180, "krit": 36265, "kro": 16193, "kroger</w>": 36344, "kron": 25999, "kru": 10609, "kruger</w>": 32948, "krun": 43084, "kry": 13995, "krystal</w>": 36554, "ks": 10470, "ks</w>": 662, "ksa</w>": 25439, "ksh</w>": 36594, "kst</w>": 17420, "kstate": 48590, "ksu</w>": 43496, "kswx</w>": 36180, "kt": 17238, "kt</w>": 
7792, "ktm</w>": 33989, "ktn": 42170, "kton</w>": 37848, "kts</w>": 48577, "ktv": 36444, "ku": 1836, "ku</w>": 4827, "kuala</w>": 30336, "kubball</w>": 48995, "kuber": 41336, "kubernetes</w>": 45144, "kubrick</w>": 37032, "kuch</w>": 39394, "kud": 40818, "kudos</w>": 14481, "kul": 11325, "kul</w>": 31514, "kum": 18086, "kum</w>": 28148, "kuma": 43139, "kuma</w>": 33920, "kumar": 22329, "kumar</w>": 7674, "kumb": 31391, "kun": 6849, "kun</w>": 21842, "kung": 39656, "kung</w>": 22347, "kunst</w>": 37881, "kup": 39023, "kups</w>": 27240, "kur": 4862, "kurdi": 23504, "kurdish</w>": 21644, "kurdistan</w>": 24459, "kurds</w>": 20888, "kuri": 46375, "kuro": 28239, "kuro</w>": 47826, "kurt": 31903, "kurt</w>": 14527, "kus": 27618, "kus</w>": 27505, "kush": 22264, "kush</w>": 24594, "kushner</w>": 36716, "kut": 17283, "kut</w>": 36965, "kuwait</w>": 19679, "kuya</w>": 34815, "kuz": 33253, "kv": 27594, "kv</w>": 34249, "kw": 10072, "kw</w>": 18339, "kwa": 32784, "kwa</w>": 48576, "kwame</w>": 46681, "kwan": 37100, "kwan</w>": 39447, "kwang</w>": 40260, "kwe": 26050, "kwi": 35327, "kwon</w>": 36369, "kx": 28190, "kx</w>": 46442, "ky": 2018, "ky</w>": 2383, "kya</w>": 29142, "kyc</w>": 37758, "kyiv</w>": 36422, "kyle": 15847, "kyle</w>": 7539, "kylie": 28282, "kylie</w>": 17983, "kyliejenner</w>": 47232, "kylo</w>": 47704, "kyo": 13150, "kyo</w>": 6281, "kyoto</w>": 23223, "kyr": 26329, "kyrgy": 40013, "kyrgyz": 48346, "kyrie</w>": 21857, "kyu": 28296, "kyu</w>": 25490, "kyuhyun</w>": 37229, "kyung</w>": 41058, "kyungsoo</w>": 30280, "kywx</w>": 39940, "kz": 48743, "kz</w>": 36848, "kzn</w>": 38264, "kö": 32437, "l": 75, "l</w>": 331, "la": 572, "la</w>": 1210, "laa</w>": 44642, "lab": 3537, "lab</w>": 4352, "labe": 25749, "label": 12235, "label</w>": 9093, "labeled</w>": 32720, "labeling</w>": 36825, "labelled</w>": 45188, "labels</w>": 17413, "lable</w>": 31879, "labor": 11201, "labor</w>": 7878, "laboratories</w>": 43421, "laboratory</w>": 17664, "laborday</w>": 39324, "labou": 32700, "labour": 19586, "labour</w>": 6019, "labourdoorstep</w>": 37008, "labout": 35961, "labra": 37067, "labrador</w>": 25409, "labs</w>": 12021, "laby": 29131, "labyrin": 31782, "labyrinth</w>": 35594, "lac": 4477, "lac</w>": 16189, "lace": 30012, "lace</w>": 5421, "laced</w>": 36800, "laces</w>": 23281, "lacey</w>": 31754, "lach": 30558, "lack": 24915, "lack</w>": 8069, "lacking</w>": 30080, "lacks</w>": 34388, "laco": 45882, "lacrosse</w>": 12915, "lacy</w>": 38645, "lad": 15991, "lad</w>": 10707, "ladak": 42312, "ladakh</w>": 45295, "ladder</w>": 16637, "ladders</w>": 47125, "lade</w>": 26447, "laden</w>": 28634, "ladi": 12934, "ladies": 28932, "ladies</w>": 3431, "lads</w>": 9803, "lady": 7275, "lady</w>": 2909, "ladybird</w>": 43389, "ladybug</w>": 40038, "ladygaga</w>": 21232, "laf": 47555, "lafayette</w>": 22683, "lag": 30932, "lag</w>": 20394, "laga</w>": 30161, "lage</w>": 24369, "lager": 36811, "lager</w>": 22989, "lagh": 37237, "laghate": 47565, "laghateparth</w>": 48780, "lagi</w>": 39786, "lago": 42698, "lago</w>": 31476, "lagoon</w>": 22753, "lagos</w>": 12728, "lagun": 18500, "laguna</w>": 23609, "lah": 27315, "lah</w>": 4299, "lahat</w>": 42164, "lahore</w>": 16733, "lai</w>": 23947, "laid": 42560, "laid</w>": 11160, "lain": 46958, "lain</w>": 17151, "laine</w>": 35860, "lair</w>": 31981, "lais</w>": 34923, "lak": 12890, "lak</w>": 26793, "lake": 6441, "lake</w>": 2553, "lakedistrict</w>": 26437, "lakel": 26133, "lakeland</w>": 34306, "laker": 45717, "lakers</w>": 13570, "lakes</w>": 9265, "lakeshore</w>": 
42595, "lakeside</w>": 30915, "lakewood</w>": 36417, "lakh</w>": 21487, "lakhs</w>": 37985, "lakings</w>": 34289, "lakota</w>": 45510, "laksh": 24937, "lakshmi</w>": 39682, "lal": 12301, "lal</w>": 19430, "lala</w>": 33661, "lali": 21726, "laliga</w>": 32383, "lam": 2022, "lam</w>": 5704, "lama</w>": 26049, "lamar": 28678, "lamar</w>": 17284, "lamb": 19863, "lamb</w>": 10034, "lambda</w>": 36687, "lambert</w>": 14574, "lambeth</w>": 43410, "lambo</w>": 45464, "lamborgh": 18709, "lamborghini</w>": 19462, "lambs</w>": 30361, "lame</w>": 23192, "lamin": 22337, "laminated</w>": 49079, "lamo": 41461, "lamont</w>": 46719, "lamp": 26700, "lamp</w>": 10725, "lampard</w>": 39989, "lamps</w>": 23424, "lan": 1193, "lan</w>": 4872, "lana</w>": 15406, "lanapar": 47437, "lanaparrilla</w>": 47819, "lanc": 11872, "lanca": 15694, "lancashire</w>": 20939, "lancaster</w>": 16446, "lance": 26025, "lance</w>": 11609, "lancer</w>": 38195, "lancers</w>": 46392, "lancia</w>": 48698, "lancs</w>": 47540, "land": 1567, "land</w>": 973, "lande</w>": 36556, "landed</w>": 9873, "lander": 37247, "lander</w>": 9666, "landers</w>": 20019, "landfall</w>": 38465, "landfill</w>": 34947, "landia</w>": 41384, "landing</w>": 8292, "landings</w>": 46104, "landlord</w>": 28938, "landlords</w>": 35283, "landmark</w>": 15208, "landmarks</w>": 30393, "lando": 25463, "lando</w>": 7065, "landon</w>": 32748, "landrover</w>": 38125, "landry</w>": 36137, "lands": 40223, "lands</w>": 2961, "landsc": 4384, "landscape": 21123, "landscape</w>": 5727, "landscapephotography</w>": 28125, "landscapes</w>": 15344, "landscaping</w>": 25642, "landslide</w>": 31954, "lane": 25534, "lane</w>": 3980, "lanes</w>": 10345, "laney</w>": 38552, "lang": 7969, "lang</w>": 8578, "lange</w>": 32021, "langford</w>": 45615, "langley</w>": 28595, "langu": 4095, "language": 46103, "language</w>": 4781, "languages</w>": 13527, "lani</w>": 22964, "lanka</w>": 16221, "lankan</w>": 40531, "lannister</w>": 49056, "lans": 43550, "lansing</w>": 30805, "lant</w>": 44504, "lanta</w>": 44768, "lantern</w>": 17185, "lanterns</w>": 33676, "lantic": 32601, "lantic</w>": 27678, "lants</w>": 38425, "lanyard</w>": 46808, "lao": 32475, "lao</w>": 29521, "laos</w>": 34353, "lap": 7213, "lap</w>": 8639, "lapd</w>": 32557, "lapel</w>": 47961, "lapland</w>": 43633, "laps</w>": 18711, "lapse</w>": 33365, "laptop</w>": 10464, "laptops</w>": 32189, "laq</w>": 45026, "lar": 1592, "lar</w>": 1652, "lara</w>": 19435, "lard</w>": 40347, "lare": 22415, "laredo</w>": 48427, "large": 40234, "large</w>": 3638, "largely</w>": 21418, "larger</w>": 12567, "largest</w>": 4960, "largo</w>": 44161, "lari": 34676, "lark": 43164, "lark</w>": 23536, "larkin</w>": 34769, "larry": 18642, "larry</w>": 8242, "lars</w>": 8669, "larsen</w>": 39721, "larson</w>": 27973, "larvae</w>": 44840, "las": 8295, "las</w>": 2552, "lasag": 31210, "lasagna</w>": 40683, "lasalle</w>": 43866, "laser": 25607, "laser</w>": 9885, "lasers</w>": 37060, "lash": 31995, "lash</w>": 18480, "lashes</w>": 21015, "lass": 24203, "lass</w>": 18263, "lassic</w>": 39430, "last": 10600, "last</w>": 952, "lasted</w>": 25711, "lasting</w>": 13434, "lastnight</w>": 30159, "lasts</w>": 20141, "lasvegas</w>": 17789, "lat": 1591, "lat</w>": 28437, "lata</w>": 47114, "latam</w>": 40012, "late": 13267, "late</w>": 2325, "latel": 49035, "lately</w>": 11824, "latepost</w>": 48328, "later": 24109, "later</w>": 2941, "lateral</w>": 26646, "latest": 46805, "latest</w>": 2053, "latex</w>": 27520, "lati": 16357, "latimes</w>": 43356, "latin": 16695, 
"latin</w>": 9888, "latina</w>": 27936, "latino": 45734, "latino</w>": 19470, "latinos</w>": 40233, "lation</w>": 6191, "latitude</w>": 37392, "lative</w>": 15719, "lator</w>": 9291, "lators</w>": 28278, "latt": 33561, "latte</w>": 17697, "latter</w>": 26198, "latvia</w>": 30034, "lau": 1853, "lau</w>": 23090, "lauderdale</w>": 24352, "laugh": 4969, "laugh</w>": 6332, "laughed</w>": 16746, "laughing</w>": 8301, "laughs</w>": 14322, "laughter</w>": 10722, "laun": 2944, "launch": 31168, "launch</w>": 2904, "launched</w>": 6125, "launcher</w>": 35782, "launches</w>": 7023, "launching</w>": 8565, "laundering</w>": 34079, "laundry</w>": 14797, "laur": 15256, "laura": 17091, "laura</w>": 7763, "laure": 16932, "laureate</w>": 25675, "laurel": 43370, "laurel</w>": 19942, "lauren": 10456, "lauren</w>": 7634, "laurence</w>": 29353, "laurent</w>": 23226, "laurie</w>": 20326, "laus": 38895, "laus</w>": 28111, "lause</w>": 22269, "laut": 47688, "lav": 13767, "lav</w>": 26919, "lava</w>": 16765, "laven": 15047, "lavender</w>": 16033, "laver": 28188, "lavish</w>": 35443, "law": 2874, "law</w>": 2606, "lawful</w>": 33845, "lawler</w>": 47862, "lawless</w>": 39468, "lawmaker</w>": 37169, "lawmakers</w>": 21190, "lawn": 31675, "lawn</w>": 11024, "lawrence": 32221, "lawrence</w>": 8820, "laws</w>": 7306, "lawson</w>": 22152, "lawsuit</w>": 14346, "lawsuits</w>": 44331, "lawyer</w>": 10552, "lawyers</w>": 14232, "lax": 17750, "lax</w>": 10024, "lay": 7205, "lay</w>": 6360, "laye": 25995, "layer</w>": 12411, "layered</w>": 28520, "layers</w>": 15900, "laying</w>": 12333, "layla</w>": 45050, "layne</w>": 48721, "layo": 21738, "layoffs</w>": 29019, "layout</w>": 17314, "lays</w>": 19546, "layton</w>": 38061, "laz": 18806, "lazar": 33075, "lazarus</w>": 49126, "laze</w>": 41559, "lazer</w>": 43735, "lazio</w>": 33010, "lazy": 32614, "lazy</w>": 10753, "lb": 21958, "lb</w>": 7422, "lbc</w>": 37694, "lbj</w>": 45683, "lbloggers</w>": 48695, "lbs</w>": 8912, "lc": 9584, "lc</w>": 7225, "lcd</w>": 21356, "lcfc</w>": 25339, "lcs</w>": 32279, "ld": 1431, "ld</w>": 730, "lder</w>": 6945, "lders</w>": 43221, "ldn": 37050, "ldn</w>": 2517, "ldnont</w>": 25827, "ldnt</w>": 21690, "ldr</w>": 37279, "lds</w>": 31235, "le": 534, "le</w>": 579, "lea": 2246, "lea</w>": 13324, "leach</w>": 35527, "lead": 1328, "lead</w>": 2784, "leader": 14806, "leader</w>": 3236, "leaderboard</w>": 34519, "leaders</w>": 3546, "leadership": 36876, "leadership</w>": 3652, "leading</w>": 3833, "leads</w>": 5335, "leaf": 9377, "leaf</w>": 7232, "leaflet</w>": 38289, "leaflets</w>": 39014, "leafs</w>": 16688, "leafy</w>": 42616, "leagu": 13317, "league": 16635, "league</w>": 2313, "leagueof": 26022, "leagueoflegends</w>": 31737, "leagues</w>": 19888, "leah": 24350, "leah</w>": 19308, "leak": 42900, "leak</w>": 15489, "leaked</w>": 14353, "leaking</w>": 34097, "leaks</w>": 15657, "leam": 39606, "lean": 12447, "lean</w>": 8208, "leaning</w>": 24411, "leanne</w>": 41448, "leans</w>": 9357, "leap": 29129, "leap</w>": 15392, "leaps</w>": 48080, "lear": 1146, "lear</w>": 27663, "learn": 16959, "learn</w>": 1768, "learned</w>": 6048, "learnenglish</w>": 49040, "learner</w>": 33547, "learners</w>": 19572, "learning": 22632, "learning</w>": 2378, "learns</w>": 17569, "learnt</w>": 18959, "leary</w>": 36051, "lease": 49041, "lease</w>": 14394, "leased</w>": 48352, "leash</w>": 36192, "leasing</w>": 29160, "least</w>": 3651, "leather": 21417, "leather</w>": 5862, "leau</w>": 26498, "leav": 3198, "leave": 37512, "leave</w>": 3258, "leaves</w>": 5579, 
"leaving</w>": 5216, "leban": 9360, "lebanese</w>": 23819, "lebanon</w>": 11695, "leblanc</w>": 46381, "lebo": 44184, "lebron</w>": 11971, "lebu": 47030, "lec": 944, "lec</w>": 35374, "leche</w>": 46197, "lect</w>": 45392, "lection</w>": 18252, "lections</w>": 30995, "lecture</w>": 6617, "lecturer</w>": 23795, "lectures</w>": 21118, "led": 8767, "led</w>": 912, "ledge": 23647, "ledge</w>": 4815, "ledger</w>": 26817, "leds</w>": 36763, "lee": 6224, "lee</w>": 2592, "leed": 16483, "leed</w>": 40206, "leeds": 38900, "leeds</w>": 7420, "leek</w>": 34585, "leeminho</w>": 37831, "leen": 35311, "leen</w>": 15940, "leep": 48875, "leep</w>": 10191, "lees": 29324, "lees</w>": 34056, "lef": 9152, "left": 33949, "left</w>": 1823, "leftist</w>": 35143, "lefto": 17437, "leftover</w>": 26414, "leftovers</w>": 28481, "lefty</w>": 33935, "leg": 1211, "leg</w>": 4924, "lega</w>": 38674, "legacy": 44108, "legacy</w>": 6447, "legal": 17743, "legal</w>": 3998, "legalization</w>": 40584, "legalize</w>": 42921, "legally</w>": 14152, "legate</w>": 46009, "lege</w>": 8065, "legen": 6105, "legend": 5480, "legend</w>": 3539, "legendary</w>": 6053, "legendof": 47915, "legends</w>": 6396, "leges</w>": 15356, "legg": 18474, "legg</w>": 32511, "legged</w>": 25830, "leggings</w>": 22895, "leggo</w>": 43441, "legi": 11183, "legion": 35503, "legion</w>": 14525, "legis": 7200, "legislat": 16486, "legislation</w>": 14143, "legislative</w>": 16755, "legislators</w>": 31572, "legislature</w>": 22309, "legit</w>": 12563, "legitim": 17656, "legitimate</w>": 24491, "lego": 28117, "lego</w>": 7849, "legos</w>": 45359, "legs</w>": 7072, "leh": 19105, "leh</w>": 29298, "lehead</w>": 28090, "lehigh</w>": 34527, "lehman</w>": 46094, "lei": 15828, "lei</w>": 21830, "leia</w>": 32723, "leic": 35073, "leica</w>": 30206, "leice": 10026, "leicester": 28795, "leicester</w>": 11510, "leicestershire</w>": 45358, "leigh": 14849, "leigh</w>": 9292, "leighton</w>": 30782, "leila</w>": 41342, "lein": 20026, "lein</w>": 28551, "leinster</w>": 32242, "leip": 36401, "leipzig</w>": 41860, "leis": 13133, "leisure</w>": 15849, "leit": 35446, "leith</w>": 34141, "lek": 26626, "lek</w>": 36535, "lel</w>": 46623, "lele</w>": 26075, "lem": 10213, "lem</w>": 8428, "leman</w>": 24478, "lemans</w>": 26694, "lement</w>": 9693, "lements</w>": 15833, "lemme</w>": 23318, "lemon": 12272, "lemon</w>": 7184, "lemonade</w>": 18884, "lemons</w>": 29576, "lemore</w>": 41147, "len": 3687, "len</w>": 2159, "lena</w>": 22038, "lend": 45397, "lend</w>": 24987, "lender</w>": 44734, "lenders</w>": 42443, "lending</w>": 20209, "lene</w>": 17628, "leness</w>": 36551, "leng": 7861, "length</w>": 10130, "lengths</w>": 31858, "lengthy</w>": 32624, "lenin</w>": 41760, "lennon</w>": 18360, "lennox</w>": 45748, "lenny": 48448, "lenny</w>": 30124, "leno</w>": 45357, "lenovo</w>": 25886, "lens</w>": 8666, "lenses</w>": 21264, "lent": 20943, "lent</w>": 22605, "lentil</w>": 41511, "lentils</w>": 44269, "leo": 24008, "leo</w>": 8312, "leon": 6581, "leon</w>": 9763, "leonard": 43849, "leonard</w>": 13142, "leonardo</w>": 20282, "leone</w>": 22864, "leop": 11234, "leopard</w>": 15931, "leopards</w>": 40996, "leopold</w>": 45501, "lep": 48884, "leppard</w>": 41656, "lepre": 45641, "ler": 5587, "ler</w>": 1803, "lero": 15067, "lerosis</w>": 35455, "leroy</w>": 32441, "lers</w>": 6247, "lery</w>": 38184, "les": 4339, "les</w>": 840, "lesbian</w>": 17419, "lesbians</w>": 43182, "lesh</w>": 32282, "lesley</w>": 25506, "lesli": 13649, "leslie</w>": 16244, "lesn": 39568, "lesnar</w>": 42223, 
"less": 3242, "less</w>": 1285, "lesser</w>": 20369, "lessly</w>": 13103, "lessness</w>": 24847, "lesson</w>": 7714, "lessons</w>": 7199, "lest": 24372, "lest</w>": 6794, "lester": 23157, "lester</w>": 24023, "lestwe": 29726, "lestweforget</w>": 30273, "let": 1898, "let</w>": 1094, "leta</w>": 34319, "lete</w>": 34078, "letes</w>": 6815, "leth": 30022, "leth</w>": 42462, "lethal</w>": 21905, "lethbridge</w>": 48390, "leti</w>": 34176, "letics</w>": 14504, "letit": 46423, "leto</w>": 32203, "leton": 37674, "leton</w>": 7462, "lets": 10448, "lets</w>": 3243, "letsgo": 16967, "letsgo</w>": 29789, "letstalk</w>": 35591, "lett": 22428, "lett</w>": 9778, "lette": 41798, "lette</w>": 10301, "letter": 15567, "letter</w>": 4861, "lettering</w>": 26382, "letterman</w>": 38447, "letters</w>": 9181, "letting</w>": 9510, "letto</w>": 35449, "lettu": 17933, "lettuce</w>": 18573, "leu": 15691, "leuke": 31031, "leukemia</w>": 32097, "leum</w>": 21571, "leur</w>": 45806, "lev": 17022, "lev</w>": 29950, "levan": 42543, "leve": 36271, "level": 21682, "level</w>": 2931, "leveled</w>": 48453, "levels</w>": 6295, "leven": 44792, "leven</w>": 34729, "lever": 20178, "lever</w>": 23094, "leverage</w>": 24030, "leveraging</w>": 37948, "levi": 25630, "levi</w>": 19113, "leviathan</w>": 41736, "levin</w>": 36949, "levine</w>": 26594, "levit": 22715, "levy</w>": 17147, "lew": 5063, "lew</w>": 25329, "lewan": 48349, "lewd</w>": 45241, "lewes</w>": 40431, "lewi": 19589, "lewis": 22043, "lewis</w>": 6020, "lewisham": 37385, "lewisham</w>": 47633, "lewishamilton</w>": 42960, "lewood</w>": 37951, "lex": 6586, "lex</w>": 9658, "lexa</w>": 48259, "lexi": 44231, "lexi</w>": 24679, "lexington</w>": 22308, "lexus</w>": 20694, "ley": 2565, "ley</w>": 1066, "leye</w>": 37061, "leys": 45609, "leys</w>": 14834, "leyton</w>": 46573, "lez</w>": 26442, "lf": 33960, "lf</w>": 22078, "lfc": 37826, "lfc</w>": 8267, "lfw</w>": 28514, "lg": 4546, "lg</w>": 11368, "lga</w>": 39348, "lgb": 25401, "lgbt": 11743, "lgbt</w>": 9592, "lgbti</w>": 42730, "lgbtq": 47625, "lgbtq</w>": 14939, "lgm</w>": 39389, "lh": 27794, "lh</w>": 31159, "lhp</w>": 45092, "lhs</w>": 33170, "li": 554, "li</w>": 4250, "lia": 26118, "lia</w>": 6964, "liability</w>": 29139, "liaison</w>": 39294, "liam": 5258, "liam</w>": 7167, "lian</w>": 18058, "liance</w>": 40864, "liar</w>": 16334, "liars</w>": 23863, "lias</w>": 46021, "lib": 10249, "lib</w>": 13345, "libby</w>": 36832, "libdems</w>": 40869, "liber": 3425, "liberal": 48032, "liberal</w>": 9985, "liberalism</w>": 40018, "liberals</w>": 15981, "liberated</w>": 38690, "liberation</w>": 19507, "liberia</w>": 32208, "libertarian</w>": 35067, "liberties</w>": 48623, "liberty": 23397, "liberty</w>": 8480, "libr": 2856, "libra</w>": 43038, "librarian</w>": 25148, "librarians</w>": 37806, "libraries</w>": 14277, "library": 25713, "library</w>": 3519, "libre": 49210, "libre</w>": 31681, "libs</w>": 26401, "liby": 36390, "libya</w>": 16417, "libyan</w>": 42319, "lic": 2508, "lic</w>": 3376, "lice</w>": 45691, "licen": 6706, "licence</w>": 20550, "license</w>": 10337, "licensed</w>": 18752, "licenses</w>": 36414, "licensing</w>": 24219, "lich": 23979, "lich</w>": 25875, "lick": 29197, "lick</w>": 17541, "licking</w>": 33013, "licks</w>": 42117, "lics</w>": 44552, "lid": 39369, "lid</w>": 17678, "lidge</w>": 45558, "lido</w>": 35683, "lids</w>": 41609, "lie": 6570, "lie</w>": 2538, "lieb</w>": 45387, "liebe</w>": 37749, "lied</w>": 6486, "lief</w>": 38428, "lien</w>": 45716, "lier</w>": 3626, "liers</w>": 19303, "lies": 37236, 
"lies</w>": 3205, "liest</w>": 14020, "liet</w>": 41107, "lieu": 20401, "lieu</w>": 35313, "lieutenant</w>": 22538, "lif": 16456, "life": 2666, "life</w>": 970, "lifeat": 27801, "lifeboat</w>": 37404, "lifecycle</w>": 49171, "lifein": 48447, "lifeis": 24824, "lifeisgood</w>": 46433, "lifel": 15025, "lifeline</w>": 38438, "lifelong</w>": 21358, "lifeof": 36061, "lifesaving</w>": 48016, "lifespan</w>": 49257, "lifestyle": 46512, "lifestyle</w>": 7037, "lifestyles</w>": 48521, "lifetime": 48737, "lifetime</w>": 9107, "liff</w>": 34404, "liffe</w>": 38942, "lift": 33146, "lift</w>": 6779, "lifted</w>": 16783, "lifter</w>": 38555, "lifting</w>": 10857, "lifts</w>": 18291, "lig": 19915, "lig</w>": 38493, "liga</w>": 16802, "ligam": 31077, "ligament</w>": 48705, "ligan</w>": 27962, "ligans</w>": 42133, "ligh": 7510, "light": 3885, "light</w>": 1395, "lighted</w>": 18404, "lighten</w>": 32717, "lightening</w>": 28170, "lighter</w>": 14102, "lighthouse</w>": 13717, "lighting</w>": 5799, "lightly</w>": 26878, "lightning</w>": 7756, "lightroom</w>": 41454, "lights</w>": 3073, "lightweight</w>": 16278, "ligu": 42920, "ligue</w>": 29196, "lik": 4831, "lik</w>": 18495, "like": 9175, "like</w>": 789, "liked</w>": 7112, "likefor": 48444, "likeli": 40666, "likelihood</w>": 48158, "likely</w>": 5256, "liken": 36084, "likes</w>": 4724, "liking</w>": 16810, "lil": 6012, "lil</w>": 4461, "lilac</w>": 33647, "lili": 26686, "lili</w>": 48411, "lilies</w>": 38110, "lillard</w>": 47016, "lille</w>": 38705, "lilli": 40920, "lillian</w>": 41563, "lilly": 47825, "lilly</w>": 21815, "lily": 23803, "lily</w>": 10647, "lim": 2377, "lim</w>": 17204, "lima</w>": 17589, "limb": 27061, "limb</w>": 32363, "limbo</w>": 46179, "limbs</w>": 34886, "lime": 17385, "lime</w>": 11193, "limel": 48658, "limer": 16915, "limerick</w>": 19501, "limestone</w>": 27272, "limit": 18933, "limit</w>": 9973, "limitations</w>": 32730, "limited": 49229, "limited</w>": 3472, "limiting</w>": 35812, "limitless</w>": 35833, "limits</w>": 11966, "limo</w>": 33166, "limous": 47287, "limpopo</w>": 47175, "lin": 1254, "lin</w>": 2424, "lina</w>": 26110, "lincol": 6239, "lincoln": 16957, "lincoln</w>": 7454, "lincolnshire</w>": 29014, "lind": 6492, "linda": 45410, "linda</w>": 10760, "linden": 44076, "linden</w>": 34832, "lindo</w>": 38467, "lindsay": 29846, "lindsay</w>": 16858, "lindsey": 29475, "lindsey</w>": 18128, "line": 3674, "line</w>": 1148, "linear</w>": 19816, "linebacker</w>": 29848, "lined</w>": 11842, "lineman</w>": 31501, "linen</w>": 20032, "liner</w>": 11618, "liners</w>": 24463, "lines</w>": 3418, "liness</w>": 28633, "lineup</w>": 7316, "lineups</w>": 33589, "ling": 4851, "ling</w>": 1358, "linger</w>": 29593, "lingerie</w>": 18473, "lingering</w>": 46494, "lings</w>": 11390, "lington": 27673, "lington</w>": 9002, "lingu": 34449, "lingui": 29942, "linguistic</w>": 46847, "linguistics</w>": 48651, "lining</w>": 11589, "link": 18433, "link</w>": 2468, "linke": 15088, "linked</w>": 11059, "linkedin</w>": 16302, "linkin": 40287, "linkin</w>": 49291, "linking</w>": 23296, "links</w>": 8113, "linn</w>": 37431, "lino": 41189, "lino</w>": 34995, "lins</w>": 6567, "linson</w>": 15401, "linton</w>": 36479, "linus</w>": 49303, "linux</w>": 14061, "lio</w>": 19395, "lion": 8872, "lion</w>": 5567, "lionel</w>": 19441, "lions</w>": 7093, "lip": 8630, "lip</w>": 8546, "lipo": 38795, "lipp": 38074, "lips</w>": 8847, "lipse</w>": 10351, "lipstick</w>": 15618, "liqu": 6310, "lique": 32680, "liqueur</w>": 43612, "liqui": 33817, "liquid": 18366, 
"liquid</w>": 10158, "liquidity</w>": 42812, "liquor</w>": 17828, "lis": 7297, "lis</w>": 12749, "lisa": 25236, "lisa</w>": 7424, "lisam": 43072, "lisboa</w>": 40052, "lisbon</w>": 17708, "lish": 12658, "lish</w>": 2354, "lished</w>": 22620, "lisle</w>": 21529, "lism</w>": 34390, "liss": 45489, "liss</w>": 35433, "lisse</w>": 49309, "list": 1734, "list</w>": 1998, "lista</w>": 37812, "listed</w>": 6457, "listen": 17454, "listen</w>": 2672, "listened</w>": 15347, "listener</w>": 34819, "listeners</w>": 26901, "listening</w>": 3656, "listens</w>": 25912, "lister</w>": 45109, "listing</w>": 8145, "listings</w>": 21987, "liston</w>": 48041, "lists</w>": 12281, "lit": 2213, "lit</w>": 4350, "lita</w>": 30100, "lite": 29273, "lite</w>": 13694, "litecoin</w>": 39063, "liter": 3085, "liter</w>": 34904, "literacy</w>": 12841, "literal</w>": 24269, "literally</w>": 4719, "literary</w>": 13586, "literature</w>": 11072, "litfest</w>": 40369, "lith": 37005, "lithium</w>": 22794, "litho": 31088, "lithograph</w>": 49022, "lithu": 21045, "lithuania</w>": 27068, "liti": 24292, "litigation</w>": 31769, "lito</w>": 47381, "litre</w>": 25786, "litres</w>": 39919, "litt": 1216, "litt</w>": 47583, "litter": 45431, "litter</w>": 17118, "litters</w>": 45300, "little": 7024, "little</w>": 1274, "littlemix</w>": 29731, "littlest</w>": 48969, "litur": 36830, "litz</w>": 30357, "liu</w>": 20466, "liv": 13895, "liv</w>": 19901, "livan</w>": 12785, "live": 3215, "live</w>": 1064, "lived</w>": 8867, "livel": 17973, "liveli": 26566, "livelihood</w>": 46497, "livelihoods</w>": 47716, "lively</w>": 19663, "liveme</w>": 35396, "livemusic</w>": 15688, "liven": 41057, "liveon": 22815, "livepd": 38742, "livepd</w>": 31899, "liver": 4755, "liver</w>": 12639, "liverpool": 29778, "liverpool</w>": 5366, "livery</w>": 23248, "lives</w>": 3247, "livesmatter</w>": 20348, "livestock</w>": 22079, "livestream</w>": 16844, "livetweet</w>": 38546, "livin</w>": 28061, "living": 10965, "living</w>": 2815, "livingston</w>": 30551, "lix</w>": 45068, "liz": 8632, "liz</w>": 12242, "liza</w>": 28787, "lizard</w>": 17221, "lizards</w>": 41991, "lizasober": 44487, "lizasoberano</w>": 45076, "lizz": 34430, "lizzie</w>": 29530, "lizzy</w>": 32306, "lj": 34211, "lj</w>": 32273, "lju": 44562, "lk": 39110, "lk</w>": 26596, "lka</w>": 21881, "ll": 1657, "ll</w>": 865, "lla</w>": 15419, "llama</w>": 36679, "llan": 17281, "llan</w>": 38728, "lland": 31150, "llc</w>": 17161, "lle": 26550, "lle</w>": 29732, "llen": 41197, "ller</w>": 7722, "llers</w>": 26426, "lli": 47015, "lli</w>": 13368, "llis</w>": 25518, "lll</w>": 27177, "llll": 34874, "llll</w>": 43485, "llo</w>": 19293, "lloy": 10092, "lloyd": 33339, "lloyd</w>": 12400, "llp</w>": 28042, "lls</w>": 40535, "lly</w>": 26379, "lm": 6981, "lm</w>": 15282, "lma": 4493, "lmao</w>": 5121, "lmaoo</w>": 32623, "lmaooo</w>": 33362, "lmaoooo</w>": 45232, "lmfa": 8928, "lmfao</w>": 11068, "lmfaooo</w>": 47658, "lmp</w>": 43575, "lms</w>": 30381, "ln": 31644, "ln</w>": 18654, "lng</w>": 22339, "lnp</w>": 39679, "lo": 549, "lo</w>": 2982, "loa</w>": 39678, "load": 4515, "load</w>": 2834, "loaded</w>": 6756, "loader</w>": 28492, "loading</w>": 9975, "loads</w>": 8691, "loaf": 26467, "loaf</w>": 18273, "loan": 28431, "loan</w>": 8176, "loans</w>": 14206, "lob": 11197, "lob</w>": 46606, "lobal</w>": 34574, "lobb": 27698, "lobby</w>": 12449, "lobbying</w>": 36047, "lobe</w>": 46325, "lobes</w>": 24148, "lobo</w>": 39323, "lobos</w>": 36586, "lobster</w>": 13793, "loc": 1378, "loc</w>": 25826, "local": 9202, 
"local</w>": 2029, "localized</w>": 49399, "locally</w>": 15603, "locals</w>": 15041, "locate</w>": 20490, "located</w>": 5677, "location</w>": 4372, "locations</w>": 9580, "loch": 20188, "loch</w>": 14101, "lock": 7201, "lock</w>": 4381, "lockdown</w>": 35636, "locke</w>": 29698, "locked</w>": 8371, "locker</w>": 14053, "lockhart</w>": 48642, "lockheed</w>": 36637, "locking</w>": 19978, "locks</w>": 13212, "lockscreen</w>": 42439, "loco</w>": 25555, "locom": 22798, "locomo": 46147, "locomotive</w>": 30439, "locu": 33635, "locust</w>": 46237, "lod": 45650, "lodge</w>": 10504, "loe": 30113, "loe</w>": 25484, "loeb</w>": 49334, "lof": 15011, "loff</w>": 31008, "loft": 35707, "loft</w>": 20049, "loftus</w>": 46689, "log": 3239, "log</w>": 7383, "logan": 20655, "logan</w>": 10569, "logans</w>": 40752, "logg</w>": 43002, "logged</w>": 31457, "logger</w>": 39089, "logging</w>": 24444, "logi": 3177, "logia</w>": 48031, "logic</w>": 10670, "logical</w>": 4791, "logically</w>": 24782, "logie</w>": 33445, "logies</w>": 7378, "login</w>": 31121, "logist</w>": 7407, "logistics</w>": 14755, "logists</w>": 12233, "logne</w>": 19911, "logo": 31480, "logo</w>": 5750, "logos</w>": 24879, "logs</w>": 22745, "logue</w>": 27785, "logy": 22721, "logy</w>": 1659, "loh": 49129, "loh</w>": 37983, "loi</w>": 35128, "loid</w>": 31408, "loin</w>": 21760, "loire</w>": 46040, "lois</w>": 27040, "lok": 19908, "lok</w>": 23575, "loki</w>": 24435, "lol": 10721, "lol</w>": 1824, "lola</w>": 19065, "lolita</w>": 42615, "lolla": 45483, "lolli": 27906, "lollipop</w>": 34605, "lolly</w>": 48264, "lolo": 16895, "lolo</w>": 37481, "lolol</w>": 25280, "lololol</w>": 34738, "lolz</w>": 35260, "lom": 9279, "loma</w>": 42889, "lombar": 25493, "lombard</w>": 46461, "lombardi</w>": 44346, "lomond</w>": 48941, "lon": 1235, "lon</w>": 6507, "london": 6835, "london</w>": 1789, "londonmarathon</w>": 35018, "lone": 22220, "lone</w>": 13576, "lonel": 28872, "loneliness</w>": 30310, "lonely": 34509, "lonely</w>": 12368, "lonelyplanet</w>": 44984, "long": 4792, "long</w>": 1538, "longe": 25793, "longer</w>": 5349, "longest</w>": 10731, "longevity</w>": 35354, "longh": 20286, "longhorn</w>": 41047, "longhorns</w>": 38295, "longing</w>": 38482, "longlive": 47840, "longs</w>": 43618, "longtime</w>": 19685, "loo": 731, "loo</w>": 11804, "look": 8874, "look</w>": 1012, "lookalike</w>": 38307, "lookbook</w>": 39184, "looked</w>": 4913, "lookin</w>": 11254, "looking": 36898, "looking</w>": 1312, "lookout</w>": 18330, "looks</w>": 1606, "lool</w>": 33125, "loom": 37440, "loom</w>": 17199, "looming</w>": 35384, "looms</w>": 30550, "loon</w>": 28222, "loona</w>": 48137, "looney</w>": 45315, "looo": 20902, "loool</w>": 36016, "looool</w>": 47038, "looooo": 31484, "loop": 19606, "loop</w>": 10408, "loops</w>": 21625, "loos</w>": 45723, "loose": 43815, "loose</w>": 9786, "loot</w>": 21518, "lop": 36734, "lop</w>": 17066, "lopes</w>": 49269, "lopez</w>": 12982, "lor": 2179, "lor</w>": 11335, "lord": 18896, "lord</w>": 3486, "lorde</w>": 35483, "lords</w>": 14969, "lore": 12880, "lore</w>": 27218, "loren": 13602, "loren</w>": 33398, "lorenzo</w>": 21342, "lores</w>": 34510, "loretta</w>": 40863, "lori": 20164, "lori</w>": 23095, "lorna</w>": 46316, "lorraine</w>": 27602, "lorry</w>": 31354, "los": 32217, "los</w>": 3087, "losange": 14037, "losangeles</w>": 14638, "lose": 43318, "lose</w>": 5354, "loser</w>": 18168, "losers</w>": 23201, "loses</w>": 14263, "losing</w>": 7918, "loss": 34761, "loss</w>": 4327, "losses</w>": 16909, "lost": 14258, "lost</w>": 
2624, "lostdog</w>": 48482, "lot": 5132, "lot</w>": 1954, "loth</w>": 43625, "lothian</w>": 31360, "lothing</w>": 42058, "lotion</w>": 25260, "lotr</w>": 34165, "lots</w>": 2958, "lott</w>": 42854, "lotta</w>": 29125, "lotte": 16535, "lotte</w>": 7274, "lottery</w>": 16975, "lottie</w>": 48517, "lotto</w>": 28265, "lotus</w>": 13824, "lou": 2207, "lou</w>": 9745, "loubout": 38369, "loud": 22884, "loud</w>": 7464, "louder</w>": 25904, "loudest</w>": 49214, "loudly</w>": 39256, "lough": 21927, "lough</w>": 28045, "loughborough</w>": 49153, "loui": 42173, "louie</w>": 25790, "louis": 8916, "louis</w>": 4459, "louisa</w>": 40011, "louise": 32275, "louise</w>": 13076, "louisi": 12187, "louisiana</w>": 12946, "louisville</w>": 13860, "louisvuitton</w>": 44911, "loun": 6466, "lounge</w>": 7141, "lounging</w>": 45430, "lour": 29383, "lourdes</w>": 45071, "louvre</w>": 36995, "lov": 8923, "lov</w>": 21229, "lova</w>": 37394, "lovable</w>": 38565, "lovato</w>": 18960, "love": 2618, "love</w>": 793, "lovecraft</w>": 42405, "loved</w>": 3249, "lovefl</w>": 38884, "loveher</w>": 38306, "lovehim</w>": 45733, "loveis": 30931, "loveisland</w>": 30970, "loveislove</w>": 43603, "loveit</w>": 24764, "lovel": 8999, "lovelies</w>": 31412, "lovelondon</w>": 46493, "lovely": 33250, "lovely</w>": 2165, "lovemy": 20041, "lovemyjob</w>": 40130, "loven": 33754, "lover": 28508, "lover</w>": 7168, "lovers": 48416, "lovers</w>": 5973, "loves": 37773, "loves</w>": 3925, "lovethe": 33040, "lovethem</w>": 48298, "lovett</w>": 47095, "lovewins</w>": 47687, "loveyou": 39226, "loveyou</w>": 25964, "loveyour": 26462, "lovin": 33442, "lovin</w>": 16354, "loving": 29568, "loving</w>": 3721, "lovingly</w>": 44100, "low": 1049, "low</w>": 1042, "loway</w>": 16104, "lowe</w>": 17910, "lowed</w>": 22733, "lowell</w>": 24458, "lower": 32578, "lower</w>": 4909, "lowered</w>": 34968, "lowering</w>": 35261, "lowers</w>": 36398, "lowes</w>": 38515, "lowest</w>": 12098, "lowing</w>": 8283, "lowkey</w>": 29481, "lowry</w>": 27444, "lows</w>": 4406, "lox</w>": 41725, "loy": 4519, "loy</w>": 23929, "loyal</w>": 13032, "loyalty</w>": 14686, "loyd</w>": 44212, "loyed</w>": 29279, "loyment</w>": 18307, "loyola</w>": 32569, "lp": 22282, "lp</w>": 6392, "lpc</w>": 44092, "lpg</w>": 47905, "lpga</w>": 34295, "lps</w>": 32094, "lr": 20572, "lr</w>": 7041, "lrt</w>": 32996, "ls": 19051, "ls</w>": 1268, "lsd</w>": 43766, "lse": 46127, "lse</w>": 43886, "lsu": 35428, "lsu</w>": 15672, "lt": 13642, "lt</w>": 3333, "ltc</w>": 27664, "ltd</w>": 6802, "lte</w>": 25202, "lton</w>": 14237, "lu": 664, "lu</w>": 9657, "lub": 22469, "lub</w>": 11836, "lubbock</w>": 37660, "lubric": 40963, "luc": 7013, "luc</w>": 28014, "luca</w>": 21053, "lucas": 23425, "lucas</w>": 10225, "lucci</w>": 45849, "luce</w>": 46217, "lucent</w>": 41552, "lucer": 36042, "luch": 36646, "lucha</w>": 38449, "luci": 8787, "lucia</w>": 22290, "luciano</w>": 46365, "lucid</w>": 44540, "lucie</w>": 39461, "lucifer": 46224, "lucifer</w>": 27687, "lucille</w>": 47454, "lucin": 27523, "luck": 9647, "luck</w>": 2820, "luckiest</w>": 42469, "luckily</w>": 20100, "lucknow</w>": 29407, "lucky": 20495, "lucky</w>": 4133, "lucrative</w>": 41485, "lucy": 17262, "lucy</w>": 10120, "lud": 14288, "lude</w>": 28755, "ludo": 40141, "ludwig</w>": 30633, "lue</w>": 45199, "luf": 25264, "lufc</w>": 17818, "luffy</w>": 39047, "lufthan": 37769, "lufthansa</w>": 39145, "lug</w>": 45521, "lugg": 19673, "luggage</w>": 20138, "luhan</w>": 20975, "luigi</w>": 28444, "luis": 25231, "luis</w>": 11339, "luiz</w>": 
39633, "lujah</w>": 31639, "luk</w>": 21652, "luka</w>": 34878, "lukaku</w>": 37177, "lukas</w>": 37941, "luke": 11970, "luke</w>": 5652, "lul": 20861, "lulla": 37019, "lullaby</w>": 41676, "lulu": 32052, "lulu</w>": 26935, "lum": 18112, "lum</w>": 5997, "lumb": 36231, "lumber": 27421, "lumber</w>": 34692, "lumi": 41437, "lumia</w>": 31912, "lumin": 15867, "luminous</w>": 37913, "lump</w>": 38704, "lumpur</w>": 34411, "lun": 3221, "lun</w>": 49390, "luna</w>": 14425, "lunar</w>": 16043, "lunatic</w>": 45874, "lunch": 10954, "lunch</w>": 2772, "luncheon</w>": 15104, "lunches</w>": 29705, "lunchtime</w>": 14330, "lund": 30975, "lund</w>": 20181, "lunes</w>": 35648, "lung": 38479, "lung</w>": 16271, "lungs</w>": 27366, "lup</w>": 27413, "lupita</w>": 49352, "lupus</w>": 36017, "lur": 14439, "lure</w>": 31376, "lures</w>": 46747, "lurking</w>": 29941, "lus</w>": 7158, "lusci": 38004, "luscious</w>": 39935, "lush": 40382, "lush</w>": 16263, "lust": 42071, "lust</w>": 12662, "lustre</w>": 46673, "luther": 21848, "luther</w>": 17208, "lutheran</w>": 27341, "luton</w>": 28288, "luv": 24726, "luv</w>": 8502, "lux": 3439, "lux</w>": 16704, "luxe</w>": 26373, "luxemb": 21314, "luxembour": 22712, "luxembourg</w>": 23949, "luxu": 16112, "luxurious</w>": 17292, "luxury": 12083, "luxury</w>": 5247, "luxurytravel</w>": 29010, "luz</w>": 41008, "lv": 10862, "lv</w>": 11184, "lvl</w>": 31256, "lw": 40515, "lw</w>": 35115, "lx</w>": 30789, "ly": 1251, "ly</w>": 597, "lydia</w>": 24316, "lyf</w>": 43688, "lyfe</w>": 30787, "lyft</w>": 32944, "lying</w>": 7175, "lyk</w>": 46376, "lyle</w>": 36828, "lym": 20087, "lyme</w>": 31167, "lymph": 30073, "lymphoma</w>": 37648, "lyn": 3957, "lyn</w>": 5054, "lynch": 31586, "lynch</w>": 13560, "lynd": 33416, "lynda</w>": 42959, "lyndon</w>": 48518, "lynn": 25303, "lynn</w>": 10667, "lynne</w>": 26900, "lynx</w>": 28941, "lyon</w>": 17176, "lyons</w>": 29453, "lyric": 24366, "lyric</w>": 21291, "lyrical</w>": 33358, "lyricist</w>": 49013, "lyrics</w>": 9551, "lyrix</w>": 46814, "lys</w>": 45054, "lyte</w>": 40059, "lywood</w>": 4012, "lz</w>": 30818, "lé": 39641, "m": 76, "m</w>": 332, "ma": 577, "ma</w>": 1226, "maa": 42774, "maa</w>": 21555, "maan</w>": 33668, "maar</w>": 48927, "maas": 43332, "mab</w>": 35639, "mabel</w>": 47319, "mable</w>": 23001, "mably</w>": 40082, "mabu": 44682, "mac": 1961, "mac</w>": 4945, "macar": 21558, "macaroni</w>": 41824, "macarthur</w>": 36785, "macau": 43984, "macau</w>": 33370, "macbeth</w>": 36321, "macbook</w>": 20617, "macdonald</w>": 20315, "mace</w>": 44869, "maced": 21102, "macedonia</w>": 27071, "macfar": 45374, "macfarlane</w>": 48825, "mach": 2637, "mach</w>": 35091, "machado</w>": 42318, "mache": 43220, "macher</w>": 29330, "machi": 41783, "machin": 17972, "machine": 11539, "machine</w>": 4169, "machinelearning</w>": 13621, "machinery</w>": 21858, "machines</w>": 11108, "machining</w>": 45562, "macho</w>": 43977, "macht</w>": 45225, "macin": 36533, "mack": 8590, "mack</w>": 12145, "mackay</w>": 32497, "macken": 48057, "mackenzie</w>": 22351, "mackerel</w>": 35002, "mackin": 26010, "macklemore</w>": 41758, "macle": 33843, "maclean</w>": 47137, "macleod</w>": 43684, "macmillan": 36364, "macmillan</w>": 35191, "macon</w>": 35818, "macos</w>": 45469, "macqu": 38365, "macquarie</w>": 40858, "macro": 20891, "macro</w>": 16626, "macron</w>": 24859, "macs</w>": 46548, "macy</w>": 17113, "macys</w>": 47652, "mad": 2740, "mad</w>": 3843, "mada</w>": 37799, "madagas": 24758, "madagascar</w>": 25744, "madam": 33634, "madam</w>": 27538, 
"madame</w>": 23507, "madd": 31717, "madden</w>": 19093, "maddie": 39959, "maddie</w>": 18875, "maddow</w>": 32644, "maddy</w>": 31734, "made": 5388, "made</w>": 1105, "madein": 13670, "madeira</w>": 33810, "madel": 34532, "madele": 29831, "madeleine</w>": 33264, "madeline</w>": 33905, "madewith": 28627, "madewithunity</w>": 43190, "madhu": 23000, "madhuri": 38346, "madhuridixit</w>": 43889, "madhya</w>": 48302, "madi": 6527, "madi</w>": 27282, "madison": 24798, "madison</w>": 8791, "madmen</w>": 45452, "madness</w>": 8755, "madon": 44852, "madonna</w>": 14137, "madra": 27416, "madras</w>": 42046, "madre</w>": 42130, "madri": 5529, "madrid</w>": 5909, "mads</w>": 41201, "madu": 34913, "madurai</w>": 49159, "maduro</w>": 32912, "mae": 16898, "mae</w>": 17339, "maer": 47088, "maestro</w>": 24140, "mafi": 47164, "mafia</w>": 14890, "mag": 1191, "mag</w>": 4508, "maga</w>": 8694, "magaz": 2974, "magazine</w>": 3113, "magazines</w>": 22253, "magdal": 29673, "mage": 46568, "mage</w>": 10923, "magee</w>": 43872, "magenta</w>": 38091, "magento</w>": 42442, "mages</w>": 31059, "maggi": 29611, "maggie": 41443, "maggie</w>": 14524, "maggio</w>": 49087, "magh": 45555, "magi": 19270, "magic": 13061, "magic</w>": 3778, "magical": 36408, "magical</w>": 7823, "magician</w>": 26368, "magin": 42678, "maging</w>": 41310, "magn": 10290, "magna</w>": 34076, "magne": 9921, "magnesium</w>": 36379, "magnet</w>": 18240, "magnetic</w>": 13838, "magnets</w>": 33030, "magni": 24297, "magnific": 9725, "magnificent</w>": 10724, "magnitude</w>": 22955, "magno": 21184, "magnolia</w>": 27123, "magnu": 45198, "magnum</w>": 23496, "magnus</w>": 26275, "magpie</w>": 45973, "mags</w>": 31021, "maguire</w>": 26470, "mah": 7206, "mah</w>": 10801, "maha": 12237, "maha</w>": 33983, "mahal</w>": 22301, "mahan": 45191, "mahar": 11635, "maharaj</w>": 38488, "maharashtra</w>": 19328, "mahat": 32434, "mahatma</w>": 40530, "mahe": 15756, "maher</w>": 29826, "mahesh": 33448, "mahesh</w>": 22095, "mahi": 32529, "mahi</w>": 38659, "mahin": 24113, "mahindra</w>": 31285, "mahmoud</w>": 41361, "mahog": 30804, "mahogany</w>": 33084, "mahon": 45864, "mahon</w>": 20371, "mahone</w>": 26634, "mai": 7138, "mai</w>": 14595, "maia</w>": 46585, "maid": 23148, "maid</w>": 10226, "maidan</w>": 37346, "maiden": 37011, "maiden</w>": 13809, "maids</w>": 27305, "maidstone</w>": 44395, "mail": 10478, "mail</w>": 2614, "mailbox</w>": 31482, "mailed</w>": 42314, "mailing</w>": 26680, "mailonline</w>": 26021, "mails</w>": 45213, "main": 3904, "main</w>": 2623, "maine": 18639, "maine</w>": 7836, "mained": 15609, "mainedcm</w>": 15845, "mainland</w>": 27629, "mainly</w>": 15280, "mains</w>": 33656, "mainst": 42102, "mainstream</w>": 18034, "maintain</w>": 12954, "maintained</w>": 26665, "maintaining</w>": 21964, "maintains</w>": 38335, "mainten": 9399, "maintenance</w>": 9610, "mais</w>": 28153, "maisie</w>": 47355, "maison": 37065, "maison</w>": 27626, "mait": 26387, "maize</w>": 35386, "maj": 2948, "maj</w>": 28723, "maja</w>": 47498, "maje": 9852, "majestic</w>": 15335, "majesty</w>": 21188, "major": 8008, "major</w>": 3350, "majority</w>": 10508, "majors</w>": 23597, "mak": 11271, "mak</w>": 19253, "makar": 42242, "makati</w>": 39402, "make": 3232, "make</w>": 1078, "makeaw": 45859, "makeinindia</w>": 42739, "makeit": 26308, "maken": 47093, "makeover</w>": 17926, "maker": 15196, "maker</w>": 4836, "makers</w>": 6577, "makerspace</w>": 42400, "makes</w>": 2088, "makeshift</w>": 43274, "makeu": 41707, "makeup": 26402, "makeup</w>": 5853, "makeyourown": 34090, 
"makeyourownlane</w>": 34823, "maki</w>": 34514, "makin": 43096, "makin</w>": 22407, "making": 17976, "making</w>": 1665, "makk": 39852, "maknae</w>": 44118, "mako</w>": 49061, "mal": 1662, "mal</w>": 3796, "mala</w>": 28290, "malade</w>": 36928, "malaga</w>": 35395, "malala</w>": 41137, "malam</w>": 48956, "malaria</w>": 24929, "malawi</w>": 23405, "malay": 5323, "malay</w>": 42430, "malayalam</w>": 34860, "malaysi": 39668, "malaysia</w>": 8146, "malaysian</w>": 21136, "malbec</w>": 47741, "malcol": 12645, "malcolm</w>": 14139, "maldives</w>": 16795, "male": 11326, "male</w>": 2801, "males</w>": 14426, "malhotra</w>": 28866, "mali": 6701, "mali</w>": 22669, "malia</w>": 46714, "malibu</w>": 21723, "malicious</w>": 42147, "malign": 41122, "malik</w>": 11394, "mall": 10984, "mall</w>": 6220, "mallorca</w>": 28082, "mallory</w>": 38968, "malls</w>": 36447, "malm": 44071, "malnutrition</w>": 41153, "malo</w>": 43518, "malone</w>": 19852, "maloney</w>": 45897, "mals</w>": 25370, "malt</w>": 21688, "malta</w>": 16989, "maltese</w>": 39838, "malvern</w>": 39356, "malware</w>": 24153, "mam": 4404, "mam</w>": 17778, "mama</w>": 7133, "mamamoo</w>": 36012, "mamas</w>": 42395, "mamba</w>": 44189, "mament</w>": 45690, "mami</w>": 43858, "mamma</w>": 34893, "mammal</w>": 33385, "mammals</w>": 31987, "mammoth</w>": 28022, "man": 723, "man</w>": 786, "mana": 29467, "mana</w>": 15837, "manafort</w>": 40108, "manag": 1830, "manage</w>": 9770, "managed</w>": 7928, "management</w>": 3319, "manager</w>": 3898, "managerial</w>": 44261, "managers</w>": 12853, "manages</w>": 29699, "managing</w>": 10892, "manas": 44188, "manatee</w>": 46558, "mance</w>": 2324, "manchester": 24424, "manchester</w>": 4651, "mancini</w>": 47681, "mancity</w>": 31538, "mancrush": 36945, "mancrushmonday</w>": 39307, "mand": 4325, "mand</w>": 27244, "mandala</w>": 41106, "mandarin</w>": 26455, "mandate</w>": 26228, "mandatory</w>": 19934, "mandel": 34960, "mandela</w>": 16280, "mandi": 38961, "mandir</w>": 35815, "mando</w>": 34006, "mands</w>": 12340, "mandu</w>": 31440, "mandy": 41505, "mandy</w>": 24302, "mane": 44471, "mane</w>": 16044, "maneu": 33216, "mang": 25616, "mang</w>": 31096, "manga</w>": 11873, "mangal": 43027, "manger</w>": 48251, "mango": 43831, "mango</w>": 13962, "mangrove</w>": 47180, "manhatt": 10152, "manhattan</w>": 10961, "mani": 5654, "mani</w>": 10718, "mania</w>": 8435, "maniac</w>": 31814, "maniacs</w>": 41444, "manian</w>": 40077, "manic": 23017, "manic</w>": 37825, "manicure</w>": 33637, "manife": 14379, "manifest</w>": 34422, "manifestation</w>": 48348, "manifesto</w>": 20907, "manil": 38827, "manila</w>": 10969, "manipu": 40261, "manipul": 19237, "manipulation</w>": 30277, "manipur</w>": 47757, "manish": 41759, "manish</w>": 44720, "manit": 15693, "manitoba</w>": 20342, "manjaro</w>": 41489, "mankind</w>": 24155, "manly</w>": 25194, "mann": 19396, "mann</w>": 4783, "manne": 30160, "manned</w>": 26139, "mannequin</w>": 43388, "manner</w>": 20700, "manners</w>": 31693, "manning</w>": 15996, "manny": 37054, "manny</w>": 20933, "mano": 15753, "mano</w>": 24016, "manoj</w>": 41146, "manor": 41830, "manor</w>": 13614, "mans": 28422, "mans</w>": 7746, "mansfield</w>": 25543, "manship</w>": 15460, "mansion</w>": 13404, "manslaughter</w>": 48632, "manson</w>": 26715, "mant": 25122, "mant</w>": 27037, "manta</w>": 41431, "mantis</w>": 39946, "mantle</w>": 22159, "mantra</w>": 25162, "manu": 3404, "manu</w>": 25799, "manual</w>": 12268, "manuel": 29171, "manuel</w>": 9567, "manufac": 5105, "manufacture</w>": 
27741, "manufactured</w>": 24010, "manufacturer</w>": 15668, "manufacturers</w>": 18763, "manufacturing</w>": 8386, "manure</w>": 47907, "manus</w>": 28181, "manuscript</w>": 24365, "manuscripts</w>": 40765, "manutd</w>": 20994, "many": 28484, "many</w>": 1346, "manziel</w>": 40637, "mao": 47447, "mao</w>": 25605, "maori</w>": 43400, "map": 25180, "map</w>": 3923, "maple": 21980, "maple</w>": 10570, "mapleleafs</w>": 41257, "mapoli</w>": 28768, "mapp</w>": 36894, "mapped</w>": 41596, "mapping</w>": 15231, "maps</w>": 8765, "mapu": 42082, "mar": 675, "mar</w>": 3091, "mara</w>": 15655, "marais</w>": 47913, "maran": 44732, "marath": 16274, "marathi</w>": 34102, "marathon": 40764, "marathon</w>": 5910, "marau": 38475, "marbella</w>": 36182, "marble": 45429, "marble</w>": 13071, "marbles</w>": 42931, "marc": 14054, "marc</w>": 9075, "marca</w>": 38242, "marcel": 17726, "marcel</w>": 24652, "marcelo</w>": 35939, "march": 10638, "march</w>": 2227, "marche</w>": 36173, "marched</w>": 37976, "marches</w>": 38249, "marchfor": 31721, "marching</w>": 15082, "marchmadness</w>": 28555, "marci": 36698, "marcia</w>": 41075, "marck</w>": 47733, "marco": 24719, "marco</w>": 10924, "marcor": 39945, "marcorubio</w>": 41143, "marcos</w>": 21696, "marcu": 20760, "marcus": 48955, "marcus</w>": 9895, "mardi": 39728, "mardi</w>": 29229, "mardigras</w>": 43343, "mare": 26512, "mare</w>": 8870, "mares</w>": 19724, "marg</w>": 44014, "margar": 16838, "margare": 10232, "margaret</w>": 12185, "margarita</w>": 25958, "margaritas</w>": 42679, "margate</w>": 37428, "margin": 19464, "margin</w>": 21357, "marginal</w>": 38320, "margins</w>": 33763, "margot</w>": 37144, "mari": 2603, "mari</w>": 19322, "maria": 41109, "maria</w>": 6595, "mariachi</w>": 44299, "mariah": 31214, "mariah</w>": 24789, "mariahcarey</w>": 36538, "marian": 41129, "marian</w>": 24677, "mariana</w>": 44224, "marianne</w>": 32214, "mariano</w>": 43988, "marie": 20657, "marie</w>": 7864, "marietta</w>": 46634, "marig": 41002, "marijuana</w>": 9864, "maril": 14611, "marilyn": 38959, "marilyn</w>": 18489, "marin": 8910, "marin</w>": 23992, "marina</w>": 12060, "marinated</w>": 33406, "marine": 20674, "marine</w>": 5746, "mariner</w>": 39972, "mariners</w>": 19086, "marines</w>": 15018, "marino</w>": 30878, "mario": 39176, "mario</w>": 7600, "marion": 37765, "marion</w>": 18397, "maris": 21512, "maris</w>": 33093, "marisa</w>": 42938, "mariska</w>": 44703, "marissa</w>": 31219, "marist</w>": 48223, "mariti": 13124, "maritime</w>": 14331, "marj": 38639, "mark": 3805, "mark</w>": 2110, "marke": 2399, "marked</w>": 12360, "marker</w>": 18170, "markers</w>": 23664, "market": 11614, "market</w>": 2196, "marketer</w>": 33482, "marketers</w>": 23682, "marketing": 19535, "marketing</w>": 2905, "marketplace</w>": 18241, "markets</w>": 7292, "markham</w>": 39817, "marking</w>": 14705, "markings</w>": 41046, "markle</w>": 32672, "marko</w>": 38338, "marks</w>": 5466, "markus</w>": 33725, "marl": 24922, "marlborough</w>": 43515, "marlene</w>": 45117, "marley</w>": 16504, "marlin</w>": 34275, "marlins</w>": 23309, "marlon</w>": 32995, "marmalade</w>": 39068, "marnock</w>": 48305, "maro": 27029, "maroon</w>": 20501, "marqu": 20704, "marque": 13012, "marquee</w>": 27725, "marquette</w>": 37624, "marquez</w>": 27317, "marquis</w>": 33530, "marr</w>": 32871, "marrake": 37125, "marrakech</w>": 39006, "marri": 3839, "marriage": 38047, "marriage</w>": 7040, "marriages</w>": 38190, "married</w>": 6791, "marries</w>": 46283, "marriott</w>": 19211, "marrow</w>": 31030, 
"marry</w>": 13288, "marrying</w>": 40507, "mars": 41469, "mars</w>": 7496, "marsden</w>": 43344, "marse": 26577, "marseille</w>": 30365, "marsh": 9237, "marsh</w>": 13505, "marsha</w>": 21491, "marshal</w>": 26608, "marshall": 30939, "marshall</w>": 9811, "marshals</w>": 44175, "marshes</w>": 43450, "marshmal": 21069, "marshmallow</w>": 28530, "marshmallows</w>": 39471, "mart": 2348, "mart</w>": 7772, "marta</w>": 32858, "martens</w>": 43211, "marth": 34493, "martha</w>": 16427, "marti": 20577, "martial</w>": 17088, "martialarts</w>": 35895, "martian</w>": 30214, "martin": 6929, "martin</w>": 3690, "martina</w>": 34393, "martinez</w>": 13913, "marting": 47570, "martini</w>": 22199, "martino</w>": 41675, "martins</w>": 30569, "marty": 9926, "marty</w>": 17169, "martyn</w>": 44075, "martyr": 36155, "martyr</w>": 26067, "martyrdom</w>": 43110, "martyred</w>": 39114, "martyrs</w>": 24707, "maru": 37413, "maru</w>": 31838, "marvel": 13835, "marvel</w>": 5996, "marvelcomics</w>": 46897, "marvell": 26576, "marvellous</w>": 28402, "marvelous</w>": 25487, "marvin</w>": 19675, "marx": 30559, "marx</w>": 26001, "marxist</w>": 45205, "mary": 5146, "mary</w>": 2676, "maryam": 33636, "maryam</w>": 36393, "maryland</w>": 11379, "marys": 40905, "marys</w>": 40228, "mas": 5226, "mas</w>": 1412, "masa": 24995, "masa</w>": 41868, "masala</w>": 31483, "masc": 23564, "mascar": 46984, "mascara</w>": 31635, "mascot</w>": 13983, "mascots</w>": 43266, "mascul": 25589, "masculine</w>": 48269, "masculinity</w>": 40465, "mase": 49128, "maser": 25798, "maserati</w>": 30442, "mash": 12317, "mash</w>": 15680, "mashable</w>": 41026, "mashed</w>": 27395, "mashup</w>": 27079, "masi": 35965, "masjid</w>": 31420, "mask": 19262, "mask</w>": 8306, "masked</w>": 25757, "masking</w>": 47046, "masks</w>": 19055, "maslow</w>": 44359, "mason": 17424, "mason</w>": 9699, "masonic</w>": 36491, "masonry</w>": 30764, "masons</w>": 37195, "masqu": 26593, "masquer": 29604, "masquerade</w>": 36944, "mass": 4636, "mass</w>": 4854, "massach": 14484, "massachuse": 14577, "massachusetts</w>": 14756, "massacre</w>": 14696, "massage</w>": 13055, "masse": 41735, "masses</w>": 22978, "massey</w>": 29868, "massi": 17239, "massimo</w>": 45821, "massive</w>": 4818, "massively</w>": 34297, "mast": 45916, "mast</w>": 27920, "master": 4534, "master</w>": 3498, "mastercard</w>": 40542, "masterchef</w>": 34809, "masterclass</w>": 17529, "mastered</w>": 32616, "masterful</w>": 46823, "mastering</w>": 28326, "mastermind</w>": 34029, "masterpiece</w>": 12066, "masterpieces</w>": 37596, "masters</w>": 6913, "mastery</w>": 34800, "mastiff</w>": 42311, "maswar": 47887, "mat": 905, "mat</w>": 9063, "mata</w>": 17270, "match": 7733, "match</w>": 2439, "matcha</w>": 32433, "matchday</w>": 15947, "matched</w>": 17792, "matches</w>": 8609, "matching</w>": 11840, "matchup</w>": 19355, "matchups</w>": 49162, "mate": 6137, "mate</w>": 2936, "mated</w>": 33813, "mateo</w>": 34991, "mater</w>": 23724, "materi": 7084, "material</w>": 7118, "materials</w>": 8161, "maternal</w>": 26131, "maternity</w>": 23894, "mates</w>": 5817, "math": 13277, "math</w>": 6025, "mathe": 8725, "mathemat": 11901, "mathematical</w>": 25609, "mathematician</w>": 41036, "mathematics</w>": 20113, "mathew</w>": 36333, "mathews</w>": 37120, "mathi": 23014, "mathieu</w>": 40417, "maths</w>": 14763, "mati": 12716, "mati</w>": 32268, "matic": 36859, "matic</w>": 7900, "matically</w>": 38282, "matics</w>": 23634, "matil": 26751, "matilda</w>": 36308, "matin</w>": 44849, "matinee</w>": 38525, 
"mating</w>": 34346, "mation</w>": 11701, "matisse</w>": 43446, "mato": 13127, "matologist</w>": 48842, "matology</w>": 27940, "matory</w>": 25519, "matri": 27041, "matrix</w>": 18078, "mats</w>": 22259, "matsu": 30242, "matt": 7972, "matt</w>": 3972, "mattb": 42791, "matte": 31237, "matte</w>": 19771, "mattel</w>": 35365, "matteo</w>": 33120, "matter": 30471, "matter</w>": 3828, "matters</w>": 5708, "matth": 41846, "matthe": 5116, "matthew": 17588, "matthew</w>": 7008, "matthews</w>": 16739, "matthi": 29853, "matthias</w>": 45104, "matti": 39840, "mattress</w>": 23438, "matty": 31233, "matty</w>": 29176, "matu": 40616, "matur": 22897, "mature</w>": 14417, "maturity</w>": 28047, "mau": 8134, "mau</w>": 23033, "maui</w>": 20463, "maul": 30725, "maur": 10574, "maure": 25191, "maureen</w>": 31723, "maurice</w>": 20200, "mauricio</w>": 39066, "mauriti": 28406, "mauritius</w>": 29305, "mauro</w>": 41691, "mav": 25697, "maver": 16700, "maverick</w>": 27425, "mavericks</w>": 30092, "mavs</w>": 30665, "maw": 39351, "maw</w>": 42271, "mawards</w>": 37682, "max": 4898, "max</w>": 3902, "maxi": 8554, "maxi</w>": 23266, "maxim": 19892, "maxim</w>": 38574, "maximize</w>": 28673, "maximum</w>": 13162, "maximus</w>": 44312, "maxine</w>": 38468, "maxwell</w>": 19611, "maxx</w>": 37466, "may": 1686, "may</w>": 1270, "maya": 45783, "maya</w>": 12987, "mayan</w>": 37952, "maybe</w>": 3746, "mayday</w>": 29957, "mayer</w>": 21196, "mayfair</w>": 35171, "mayfield</w>": 33933, "mayhem</w>": 21502, "maymay": 26600, "maymay</w>": 33853, "maymayentrata</w>": 30480, "maynard</w>": 32487, "mayne</w>": 35771, "mayo": 22449, "mayo</w>": 11280, "mayor": 15429, "mayor</w>": 4676, "mayoral</w>": 28983, "mayorof": 43533, "mayors</w>": 28501, "mays</w>": 35445, "maythe</w>": 42281, "mayward": 45751, "mayward</w>": 23519, "mayweather</w>": 22774, "maz": 9177, "maz</w>": 36215, "mazda</w>": 18506, "maze</w>": 21988, "mazz": 29439, "mañ": 37059, "mañana</w>": 39354, "mb": 758, "mb</w>": 3996, "mba</w>": 8329, "mban</w>": 46685, "mbar": 44452, "mbb</w>": 10736, "mbc</w>": 20137, "mbe": 38395, "mbe</w>": 27004, "mber": 5467, "mber</w>": 1034, "mberg</w>": 26372, "mbers</w>": 5443, "mbi</w>": 45347, "mble": 20310, "mble</w>": 4756, "mbles</w>": 28693, "mbling</w>": 28604, "mbo": 25733, "mbo</w>": 11319, "mbps</w>": 44896, "mbs</w>": 10370, "mbta</w>": 38979, "mbu</w>": 42228, "mbuhari</w>": 36752, "mc": 1278, "mc</w>": 4126, "mca": 40570, "mca</w>": 14635, "mcal": 28663, "mcar": 43776, "mcbride</w>": 35080, "mcc</w>": 21192, "mccabe</w>": 37628, "mccaf": 47385, "mccain</w>": 20397, "mccall</w>": 34844, "mccann</w>": 27140, "mccar": 9570, "mccarthy</w>": 16974, "mccartney</w>": 19958, "mccl": 24709, "mccla": 43672, "mccle": 40139, "mcclure</w>": 44945, "mcco": 46152, "mccon": 32638, "mccor": 23057, "mccormack</w>": 45164, "mccormick</w>": 39088, "mccoy</w>": 20218, "mccr": 41996, "mccre": 25393, "mccul": 38833, "mccull": 41782, "mcd</w>": 28930, "mcder": 27355, "mcdermott</w>": 34504, "mcdon": 12171, "mcdonald</w>": 10741, "mcdonalds</w>": 17674, "mcdonnell</w>": 34360, "mcdowell</w>": 34119, "mce": 26864, "mcel": 28752, "mcen": 47423, "mcfad": 36976, "mcfadden</w>": 42105, "mcfar": 29020, "mcfarlane</w>": 47174, "mcfc</w>": 16416, "mcfly</w>": 38211, "mcg": 42507, "mcg</w>": 27995, "mcgee</w>": 29223, "mcgill": 46524, "mcgill</w>": 35511, "mcgin": 29596, "mcgowan</w>": 40462, "mcgr": 25169, "mcgra": 29367, "mcgrath</w>": 28759, "mcgraw</w>": 40950, "mcgregor</w>": 19642, "mcgu": 34294, "mcguinness</w>": 45299, "mcguire</w>": 
32635, "mci": 46212, "mci</w>": 45491, "mcil": 30481, "mcin": 18770, "mcintosh</w>": 45353, "mcintyre</w>": 33369, "mck": 6781, "mckay</w>": 33611, "mcke": 27424, "mckee</w>": 43529, "mcken": 42619, "mckenna</w>": 24924, "mckenzie</w>": 25502, "mckin": 15437, "mckinley</w>": 39891, "mckinney</w>": 33554, "mckinnon</w>": 48736, "mckinsey</w>": 48143, "mcl": 49021, "mcla": 12565, "mclaren": 37381, "mclaren</w>": 16789, "mclau": 32285, "mclaughlin</w>": 35346, "mcle": 25299, "mclean</w>": 28666, "mcleod</w>": 40259, "mcm</w>": 12251, "mcmahon</w>": 24026, "mcmaster</w>": 42703, "mcmillan</w>": 45603, "mcn": 42919, "mcnam": 32682, "mcnamara</w>": 37506, "mcne": 42545, "mco</w>": 33723, "mcqueen</w>": 22544, "mcr": 29884, "mcr</w>": 16966, "mcs</w>": 27020, "mcu</w>": 30403, "md": 8637, "md</w>": 4732, "mdc": 38773, "mdc</w>": 41761, "mds</w>": 48746, "mdt</w>": 40822, "me": 613, "me</w>": 614, "mea": 46045, "mea</w>": 17711, "mead": 12134, "mead</w>": 21567, "meade</w>": 37218, "meado": 16402, "meadow": 25213, "meadow</w>": 17195, "meadows</w>": 17178, "meal": 29662, "meal</w>": 5478, "meals</w>": 11229, "mean": 4189, "mean</w>": 3450, "meand": 48015, "meaning": 14586, "meaning</w>": 8342, "meaningful</w>": 17480, "meaningless</w>": 48932, "meanings</w>": 45814, "means</w>": 3494, "meant</w>": 8674, "meantime</w>": 27499, "meanwhile</w>": 9650, "meas": 5867, "measles</w>": 38230, "measurable</w>": 48010, "measure": 15261, "measure</w>": 10579, "measured</w>": 23154, "measurement</w>": 20973, "measurements</w>": 29894, "measures</w>": 11936, "measuring</w>": 18064, "meat": 10805, "meat</w>": 6480, "meatball</w>": 43642, "meatballs</w>": 29233, "meath</w>": 37920, "meatless": 48085, "meats</w>": 29558, "mec</w>": 27432, "mecca</w>": 36095, "mech</w>": 38305, "mechan": 6715, "mechanic</w>": 24582, "mechanical</w>": 14467, "mechanics</w>": 20536, "mechanism</w>": 22576, "mechanisms</w>": 28610, "meck": 41908, "med": 1948, "med</w>": 2177, "meda</w>": 33614, "medal": 29714, "medal</w>": 6974, "medalist</w>": 21040, "medalists</w>": 43397, "medalli": 31349, "medallion</w>": 43469, "medallist</w>": 41472, "medals</w>": 14710, "mede</w>": 48225, "meded</w>": 27627, "medi": 1436, "media": 22064, "media</w>": 1895, "mediac": 37490, "median</w>": 30491, "mediation</w>": 42829, "medic": 3602, "medic</w>": 35441, "medicaid</w>": 25421, "medical": 18432, "medical</w>": 4116, "medicare</w>": 23710, "medication</w>": 23771, "medications</w>": 37181, "medicinal</w>": 28772, "medicine</w>": 5616, "medicines</w>": 26541, "medics</w>": 46688, "medieval": 38956, "medieval</w>": 10789, "medina</w>": 27281, "mediocre</w>": 41170, "medit": 19130, "meditate</w>": 38039, "meditation</w>": 10827, "mediter": 14194, "mediterran": 14358, "mediterranean</w>": 15327, "medium</w>": 8675, "medley</w>": 24793, "meds</w>": 25075, "medtech</w>": 42044, "medusa</w>": 44216, "medway</w>": 42286, "mee": 1725, "mee</w>": 14075, "meek</w>": 28935, "meen": 37940, "meen</w>": 46515, "meer": 26714, "meer</w>": 27555, "meet": 5714, "meet</w>": 1633, "meeting": 48566, "meeting</w>": 2071, "meetings</w>": 9980, "meets</w>": 5972, "meetthe": 27575, "meetup</w>": 15430, "meg": 11500, "meg</w>": 16186, "mega": 15979, "mega</w>": 9068, "megab": 38103, "megadeth</w>": 46741, "megal": 37650, "megam": 26073, "megan": 19127, "megan</w>": 11503, "megap": 33624, "megat": 35581, "megh": 31192, "meghan": 39939, "meghan</w>": 18261, "meh": 10512, "meh</w>": 22211, "mehta</w>": 25031, "mei": 22564, "mei</w>": 25198, "meier</w>": 29812, "mein": 28857, 
"mein</w>": 21466, "meister</w>": 28407, "mek</w>": 44645, "mel": 1902, "mel</w>": 6834, "mela</w>": 35032, "melan": 22261, "melanch": 44818, "melancholy</w>": 47821, "melani": 34031, "melania</w>": 32796, "melanie</w>": 22153, "melanoma</w>": 40862, "melb": 47007, "melb</w>": 28980, "melbourne": 28387, "melbourne</w>": 6995, "melee</w>": 45108, "meli": 28885, "melinda</w>": 46303, "melis": 18913, "melissa": 41866, "melissa</w>": 13030, "mell": 22531, "mell</w>": 41583, "mello</w>": 47594, "mellon</w>": 45162, "mellow</w>": 32034, "melo": 10354, "melo</w>": 22374, "melodic</w>": 41877, "melodies</w>": 38412, "melody</w>": 19119, "melon</w>": 12146, "melrose</w>": 36296, "melt": 22209, "melt</w>": 15957, "meltdown</w>": 30613, "melted</w>": 23037, "melting</w>": 19247, "melton</w>": 46062, "melts</w>": 31446, "melville</w>": 46030, "melvin</w>": 31544, "mely</w>": 6373, "mem": 4937, "mem</w>": 34944, "memb": 2114, "member": 29566, "member</w>": 1640, "members</w>": 2567, "membership</w>": 11562, "membrane</w>": 34088, "meme": 35157, "meme</w>": 9169, "memes</w>": 12828, "memo": 15967, "memo</w>": 19334, "memoir</w>": 20532, "memoirs</w>": 45311, "memor": 1858, "memorab": 26271, "memorabilia</w>": 27488, "memorable</w>": 13172, "memorial": 16285, "memorial</w>": 4642, "memorialday</w>": 21598, "memoriam</w>": 48191, "memories</w>": 4304, "memory": 44766, "memory</w>": 5137, "memph": 10285, "memphis": 38432, "memphis</w>": 11298, "men": 1552, "men</w>": 1656, "mena</w>": 23052, "menace</w>": 29949, "mend": 8151, "mend</w>": 46927, "mendel": 49268, "mendes</w>": 18060, "mendez</w>": 48275, "mendo": 19327, "mendoza</w>": 23680, "meng": 37102, "meng</w>": 37450, "mening": 46428, "menon</w>": 38255, "menopau": 34974, "menopause</w>": 46026, "mens": 16924, "mens</w>": 10495, "mensfashion</w>": 27578, "menstru": 28345, "menstrual</w>": 40915, "menswear</w>": 18803, "ment": 1585, "ment</w>": 777, "mental": 8611, "mental</w>": 3448, "mentalhealth": 20593, "mentalhealth</w>": 13022, "mentality</w>": 26647, "mentally</w>": 14307, "mentary</w>": 4468, "mentation</w>": 9512, "mentday</w>": 40397, "mente": 40302, "mente</w>": 36396, "mented</w>": 9249, "menting</w>": 14471, "mention": 43881, "mention</w>": 6762, "mentioned</w>": 11948, "mentioning</w>": 34290, "mentions</w>": 12334, "mento</w>": 30582, "mentor": 45342, "mentor</w>": 11642, "mentoring</w>": 19610, "mentors</w>": 20945, "mentorship</w>": 33878, "ments</w>": 1827, "menu</w>": 6225, "menus</w>": 33534, "meo</w>": 30792, "meow": 39965, "meow</w>": 17246, "mep</w>": 27095, "mer": 1316, "mer</w>": 2452, "mera</w>": 20028, "merc": 34357, "merc</w>": 44399, "mercado</w>": 45479, "merce": 8409, "mercede": 34959, "mercedes": 26403, "mercedes</w>": 10685, "mercedesam": 40107, "mercedesbenz</w>": 32347, "mercen": 40301, "mercer</w>": 21632, "merch</w>": 11504, "merchandi": 14954, "merchandise</w>": 16808, "merchandising</w>": 49196, "merchant</w>": 19563, "merchants</w>": 34427, "merci": 23364, "merci</w>": 29378, "mercur": 11471, "mercury": 45203, "mercury</w>": 12653, "mercy": 33249, "mercy</w>": 10815, "mere": 29657, "mere</w>": 10342, "mered": 24657, "mered</w>": 32297, "meredith</w>": 25103, "merely</w>": 28718, "merge</w>": 30406, "merged</w>": 46492, "merger</w>": 24744, "merging</w>": 49256, "meri": 17993, "meri</w>": 36109, "meria</w>": 48433, "meric</w>": 27097, "merica</w>": 30561, "meridi": 37901, "meridian</w>": 31195, "mering</w>": 41060, "meringue</w>": 41661, "merino</w>": 42648, "merit</w>": 20830, "merkel</w>": 24715, "merle</w>": 
48586, "merlin</w>": 26517, "merlot</w>": 40424, "mermaid</w>": 16064, "mermaids</w>": 43617, "mero</w>": 19097, "merr": 48288, "merri": 21462, "merrill</w>": 47713, "merritt</w>": 36462, "merry": 14167, "merry</w>": 5779, "merrychristmas</w>": 19672, "mers</w>": 4199, "mersal</w>": 36711, "mersey": 25248, "mersey</w>": 46239, "merseyside</w>": 35382, "mert</w>": 48496, "merton</w>": 35315, "mery</w>": 40873, "meryl</w>": 35787, "mes": 28432, "mes</w>": 3029, "mesa</w>": 18956, "mese</w>": 42018, "mesh</w>": 15030, "mesm": 18695, "mesmer": 38435, "mesmeri": 25985, "mesmerizing</w>": 35637, "meso": 25537, "mesqu": 46819, "mess": 2490, "mess</w>": 8188, "message</w>": 3918, "messages</w>": 9390, "messaging</w>": 23234, "messe</w>": 40391, "messed</w>": 23580, "messenger</w>": 17389, "messi": 19394, "messi</w>": 11252, "messiah</w>": 28737, "messing</w>": 23144, "messy</w>": 15987, "mest</w>": 23780, "mester</w>": 47349, "mesut</w>": 49177, "met": 5249, "met</w>": 2340, "meta": 14803, "meta</w>": 22701, "metab": 16150, "metabol": 48389, "metaboli": 25573, "metabolic</w>": 34311, "metabolism</w>": 27824, "metal": 8935, "metal</w>": 4044, "metall": 19084, "metallic</w>": 17257, "metallica</w>": 24079, "metals</w>": 21375, "metam": 28862, "metamor": 39030, "metamorpho": 47601, "metaph": 24189, "metaphor</w>": 34233, "metast": 41973, "mete": 11226, "meteor": 26429, "meteor</w>": 26823, "meteoro": 25948, "meteorologist</w>": 42849, "meter</w>": 10104, "meters</w>": 13247, "metgala</w>": 30089, "meth": 21867, "meth</w>": 26177, "methane</w>": 37565, "metho": 5770, "method</w>": 10284, "methodist</w>": 25165, "methodo": 28488, "methodology</w>": 37316, "methods</w>": 12200, "methyl": 48999, "metmuseum</w>": 28207, "meto": 25679, "metoo</w>": 24722, "metr": 15086, "metre</w>": 27889, "metres</w>": 19798, "metric</w>": 19950, "metrical</w>": 40704, "metrics</w>": 24396, "metro": 7257, "metro</w>": 6784, "metroid</w>": 39957, "metropolis</w>": 40476, "metropolitan</w>": 19013, "metry</w>": 20039, "mets</w>": 9633, "mett</w>": 28081, "metz</w>": 40506, "meu</w>": 34520, "mew</w>": 40368, "mex": 3213, "mex</w>": 18387, "mexic": 31728, "mexican": 37442, "mexican</w>": 8186, "mexicans</w>": 47729, "mexico": 31834, "mexico</w>": 4604, "mey": 28584, "mey</w>": 27777, "meyer</w>": 13963, "meyers</w>": 32326, "mez": 30615, "mez</w>": 46833, "mezz": 38771, "mf": 18199, "mf</w>": 11067, "mfa</w>": 24107, "mfc</w>": 39474, "mfg</w>": 21912, "mfw</w>": 27309, "mg": 10003, "mg</w>": 8014, "mga</w>": 23954, "mgm</w>": 27572, "mgmt</w>": 22288, "mgr</w>": 31500, "mgs</w>": 48073, "mgt</w>": 48663, "mh": 9962, "mh</w>": 10834, "mha</w>": 41944, "mhealth</w>": 41225, "mhs</w>": 28815, "mhz</w>": 31550, "mi": 714, "mi</w>": 2251, "mia</w>": 5852, "miam": 31053, "miami": 15106, "miami</w>": 4891, "mian</w>": 24792, "miaw</w>": 36046, "mib</w>": 48178, "mic": 1213, "mic</w>": 3816, "mica</w>": 41551, "micah</w>": 33870, "mice</w>": 19030, "mich": 25628, "mich</w>": 23029, "micha": 2083, "michael": 6051, "michael</w>": 2511, "michaela</w>": 41897, "michaeljackson</w>": 33532, "michaels</w>": 23868, "michal": 47144, "miche": 37966, "micheal</w>": 43709, "michel": 5158, "michel</w>": 17153, "michelangelo</w>": 41245, "michele</w>": 20642, "michelin</w>": 26330, "michelle": 19028, "michelle</w>": 8625, "michi": 5658, "michigan": 32344, "michigan</w>": 6296, "mick": 15171, "mick</w>": 12592, "mickey": 41813, "mickey</w>": 13053, "micky</w>": 43011, "micro": 3160, "micro</w>": 11374, "microbes</w>": 44671, "microbi": 19496, 
"microbial</w>": 30335, "microbiology</w>": 35348, "microbiome</w>": 35148, "micron": 48742, "microphone</w>": 24643, "micropoetry</w>": 35997, "microscope</w>": 29114, "microscopy</w>": 38431, "microsof": 42424, "microsoft": 38650, "microsoft</w>": 7254, "microwave</w>": 24240, "mics</w>": 16554, "mid": 2192, "mid</w>": 4734, "midcentury</w>": 48988, "midd": 2983, "midday</w>": 23390, "middle": 9849, "middle</w>": 3694, "middleeast</w>": 32783, "middles": 29769, "middlesbrough</w>": 32436, "middlesex</w>": 39154, "middleton</w>": 23627, "middleweight</w>": 35829, "midfield</w>": 28116, "midfielder</w>": 13423, "midget</w>": 30734, "midi": 39496, "midi</w>": 27326, "midland</w>": 24822, "midlands</w>": 18062, "midnight": 35746, "midnight</w>": 6302, "mids</w>": 40821, "midst</w>": 24752, "midsummer</w>": 35234, "midterm</w>": 34365, "midterms</w>": 32015, "midtown</w>": 26069, "midway</w>": 26536, "midweek</w>": 29120, "midwest</w>": 16627, "midwi": 44802, "midwife</w>": 37681, "midwives</w>": 42355, "mie": 20865, "mie</w>": 10555, "miento</w>": 46482, "mier</w>": 36490, "mies</w>": 8840, "miff": 49398, "mig</w>": 28743, "might</w>": 2727, "mighty": 26632, "mighty</w>": 7815, "mign": 41678, "migos</w>": 44640, "migr": 3736, "migra": 28186, "migraine</w>": 35360, "migrant</w>": 18902, "migrants</w>": 15814, "migrate</w>": 41804, "migrating</w>": 43604, "migration</w>": 11891, "migu": 12279, "miguel": 33672, "miguel</w>": 14436, "miho</w>": 46870, "mii</w>": 39896, "mik": 15096, "mik</w>": 46203, "mika": 28609, "mika</w>": 25185, "mike": 5884, "mike</w>": 3178, "mikel</w>": 48865, "mikequind": 33508, "mikequindazzi</w>": 33551, "mikey": 34934, "mikey</w>": 23368, "mikha": 30999, "mikhail</w>": 38327, "miki</w>": 48863, "miko</w>": 35413, "miku</w>": 37703, "mil": 1469, "mil</w>": 12826, "mila</w>": 26183, "milan": 30380, "milan</w>": 8552, "milano</w>": 18585, "milb</w>": 42248, "mild</w>": 16085, "mildly</w>": 49059, "mile": 7833, "mile</w>": 6243, "mileage</w>": 30579, "miler</w>": 44680, "miles</w>": 3446, "milestone</w>": 13485, "milestones</w>": 34025, "miley": 25336, "miley</w>": 14321, "mileycyrus</w>": 28528, "milf</w>": 45386, "milford</w>": 35840, "mili": 16698, "miliband</w>": 41440, "milit": 3715, "militant</w>": 33629, "militants</w>": 23974, "military": 24498, "military</w>": 4323, "militi": 46625, "militia</w>": 32114, "milk": 13409, "milk</w>": 5205, "milkshake</w>": 29066, "milky": 37320, "milky</w>": 21120, "milkyway</w>": 43246, "mill": 4221, "mill</w>": 6637, "milla</w>": 49381, "millan": 34930, "millan</w>": 22188, "millar</w>": 41851, "mille</w>": 34066, "millen": 48501, "millenni": 10406, "millennial</w>": 28357, "millennials</w>": 18804, "millennium</w>": 21116, "miller": 21699, "miller</w>": 5733, "milli": 5340, "millie</w>": 29283, "milling</w>": 39133, "million": 13154, "million</w>": 2506, "millionaire</w>": 25179, "millionaires</w>": 47159, "millions</w>": 8492, "mills</w>": 10331, "millwall</w>": 35902, "milly</w>": 45794, "milne</w>": 44590, "milner</w>": 45230, "milo</w>": 24548, "milton": 39004, "milton</w>": 17360, "milwau": 13452, "milwaukee</w>": 14259, "mim</w>": 39379, "mimi</w>": 27086, "mimic": 47116, "mimic</w>": 46519, "mimo": 45551, "min": 771, "min</w>": 3331, "mina</w>": 15281, "minaj</w>": 25136, "minal</w>": 40222, "minat</w>": 33275, "mince</w>": 32396, "mind": 5890, "mind</w>": 2575, "mindanao</w>": 44228, "minded</w>": 21330, "mindful</w>": 28457, "mindfulness</w>": 15707, "minding</w>": 45337, "minds</w>": 9244, "mindset</w>": 14217, 
"mindy": 46875, "mindy</w>": 38551, "mine": 20149, "mine</w>": 3347, "minecraft</w>": 15678, "mined</w>": 48034, "minent</w>": 12533, "miner": 14109, "miner</w>": 26572, "mineral</w>": 17692, "minerals</w>": 21169, "miners</w>": 22119, "mines</w>": 16211, "ming": 10868, "ming</w>": 2107, "mingham</w>": 7590, "mingle</w>": 38437, "mingly</w>": 36909, "mington": 49283, "mington</w>": 23119, "minh</w>": 48734, "minho</w>": 21318, "mini": 1810, "mini</w>": 3954, "miniature": 44298, "miniature</w>": 16377, "miniatures</w>": 38816, "minic": 31522, "minim": 10005, "minimal</w>": 18458, "minimalism</w>": 42594, "minimalist</w>": 26641, "minimize</w>": 38697, "minimum</w>": 12244, "minindia</w>": 28458, "mining</w>": 8473, "minion</w>": 28622, "minions</w>": 27035, "minis": 33409, "minis</w>": 35976, "minister": 25688, "minister</w>": 3569, "ministerial</w>": 33008, "ministers</w>": 16406, "ministries</w>": 27895, "ministry</w>": 8742, "mink</w>": 42017, "minn": 45991, "minn</w>": 47318, "minne": 7083, "minneapolis</w>": 16977, "minneso": 9380, "minnesota</w>": 9968, "minnie</w>": 24493, "mino</w>": 22791, "minogue</w>": 44202, "minor</w>": 8522, "minorities</w>": 28119, "minority</w>": 16210, "minors</w>": 36789, "mins</w>": 6196, "minsk</w>": 46151, "minster</w>": 11189, "mint": 48084, "mint</w>": 7506, "minted</w>": 49377, "minton</w>": 20050, "minu": 29064, "minus</w>": 15358, "minute": 28931, "minute</w>": 4497, "minutes</w>": 3056, "mio</w>": 26366, "mir": 2750, "mir</w>": 6585, "mira": 21665, "mira</w>": 22762, "mirac": 13685, "miracle": 49208, "miracle</w>": 11543, "miracles</w>": 23478, "miraculous</w>": 38671, "mirage</w>": 28679, "mirai</w>": 49060, "mirand": 32367, "miranda</w>": 17590, "mire": 38140, "mire</w>": 30140, "miri": 22273, "miriam</w>": 30950, "miro": 34851, "miro</w>": 48317, "mirren</w>": 47600, "mirro": 48500, "mirror": 29823, "mirror</w>": 7220, "mirrors</w>": 21823, "mirza</w>": 36440, "mis": 866, "mis</w>": 11239, "mischief</w>": 33896, "misconceptions</w>": 48681, "misconduct</w>": 30601, "mise": 46567, "mise</w>": 17267, "miser": 33394, "miserable</w>": 26196, "misery</w>": 28360, "mises</w>": 24390, "misfits</w>": 42708, "mish": 15494, "mish</w>": 20981, "misha</w>": 35434, "mishra</w>": 33042, "misleading</w>": 30862, "mism</w>": 15948, "miso": 27657, "miso</w>": 33441, "misogy": 31315, "misogyny</w>": 48415, "miss": 6984, "miss</w>": 1526, "missal</w>": 38337, "missed</w>": 3955, "misses</w>": 15844, "missi": 3008, "missile</w>": 14411, "missiles</w>": 27868, "missin</w>": 36209, "missing": 23509, "missing</w>": 3423, "mission": 12738, "mission</w>": 2406, "missionaries</w>": 40580, "missionary</w>": 27915, "missions</w>": 6990, "mississ": 26483, "mississauga</w>": 28393, "mississi": 11687, "mississippi</w>": 12232, "missou": 30710, "missoula</w>": 48549, "missouri</w>": 11835, "missuni": 26347, "missuniverse</w>": 28766, "missy": 48105, "missy</w>": 31515, "missyou</w>": 45799, "mist": 12610, "mist</w>": 11946, "mistak": 20478, "mistake</w>": 11303, "mistaken</w>": 29182, "mistakenly</w>": 48494, "mistakes</w>": 12824, "mister": 26949, "mister</w>": 18895, "mistle": 46800, "mistletoe</w>": 48569, "mistre": 42039, "mistress</w>": 24349, "mists</w>": 28636, "misty</w>": 18799, "misunderstood</w>": 41574, "misuse</w>": 40970, "mit": 3303, "mit</w>": 4551, "mita</w>": 47514, "mitage</w>": 27964, "mitch": 6969, "mitch</w>": 14150, "mitchell": 39339, "mitchell</w>": 9007, "mite</w>": 26929, "mith": 21752, "mith</w>": 17948, "miti": 17857, "mitigate</w>": 42273, 
"mitigation</w>": 35514, "mito": 38254, "mitochondri": 42132, "mitra</w>": 47703, "mits</w>": 24086, "mitsu": 17905, "mitsubi": 21604, "mitsubishi</w>": 23030, "mitt": 17321, "mitt</w>": 21341, "mitted</w>": 10307, "mitting</w>": 27938, "mitz": 41827, "mium</w>": 35891, "miwx</w>": 43941, "mix": 3210, "mix</w>": 3285, "mixed": 29376, "mixed</w>": 6780, "mixer</w>": 17200, "mixers</w>": 39175, "mixes</w>": 19061, "mixing</w>": 15588, "mixtape</w>": 11044, "mixture</w>": 28286, "miy": 25695, "miya</w>": 36257, "miz": 20881, "miz</w>": 30795, "mize</w>": 19076, "mized</w>": 43418, "mizing</w>": 38715, "mizz": 19985, "mizzou</w>": 26165, "mj": 13117, "mj</w>": 14733, "mk": 11581, "mk</w>": 8937, "mke</w>": 36642, "mkt</w>": 24814, "ml": 3627, "ml</w>": 5780, "mla</w>": 16723, "mlas</w>": 48464, "mlb": 21039, "mlb</w>": 7482, "mley</w>": 40329, "mlg</w>": 45801, "mlin</w>": 24556, "mlk</w>": 17941, "mlkday</w>": 39905, "mlm</w>": 37611, "mln</w>": 18971, "mlp</w>": 23620, "mlpfi": 45475, "mlpfim</w>": 45640, "mls</w>": 13077, "mm": 1028, "mm</w>": 2848, "mma": 34140, "mma</w>": 6096, "mmc</w>": 44253, "mme</w>": 13105, "mmed</w>": 19570, "mmer": 35717, "mmer</w>": 7508, "mmers</w>": 28128, "mmes</w>": 42862, "mmi</w>": 34147, "mming": 21038, "mming</w>": 16507, "mmings</w>": 31357, "mmit</w>": 41050, "mmj</w>": 43015, "mmm": 37908, "mmm</w>": 7641, "mmmm": 36312, "mmmm</w>": 13180, "mmmmm</w>": 21808, "mmmmmm</w>": 43740, "mmo</w>": 30418, "mmon</w>": 41131, "mmor": 36657, "mmorpg</w>": 39476, "mms</w>": 37803, "mmva</w>": 42666, "mmy": 28837, "mmy</w>": 8722, "mn": 5086, "mn</w>": 4057, "mna</w>": 34877, "mnd</w>": 44776, "mnet</w>": 34129, "mnf</w>": 41105, "mnl</w>": 32980, "mnleg</w>": 42653, "mns</w>": 39040, "mnt</w>": 21477, "mntwins</w>": 45448, "mnwild</w>": 39044, "mnwx</w>": 39592, "mo": 617, "mo</w>": 2080, "moa</w>": 33174, "moana</w>": 43241, "mob": 2818, "mob</w>": 12754, "mobi": 9451, "mobil": 26343, "mobil</w>": 29815, "mobile": 12935, "mobile</w>": 3451, "mobiles</w>": 44302, "mobili": 20770, "mobility</w>": 12546, "mobilization</w>": 48916, "moby</w>": 47219, "moc": 41439, "moc</w>": 36992, "mocha</w>": 28425, "mochi</w>": 47973, "mock": 15641, "mock</w>": 12759, "mocked</w>": 47400, "mocking": 28692, "mocking</w>": 37870, "mocks</w>": 35142, "mod": 6362, "mod</w>": 10893, "moda</w>": 25814, "modal</w>": 33157, "mode": 20402, "mode</w>": 6493, "model": 4591, "model</w>": 2863, "modeled</w>": 39527, "modeling</w>": 13706, "modelling</w>": 19946, "models</w>": 6176, "moder": 2894, "moderate</w>": 16435, "moderated</w>": 27928, "moderating</w>": 34242, "moderator</w>": 32659, "modern": 11706, "modern</w>": 4077, "modernart</w>": 34417, "moderni": 24328, "modernism</w>": 39601, "modernist</w>": 36773, "modernization</w>": 47294, "modes</w>": 30454, "modest</w>": 25436, "modi": 9047, "modi</w>": 7774, "modification</w>": 37630, "modified</w>": 17964, "modo</w>": 36820, "mods</w>": 23843, "modu": 9036, "modular</w>": 22437, "module</w>": 16757, "modules</w>": 30575, "moe": 38655, "moe</w>": 17938, "mof": 30798, "moff": 27160, "mog</w>": 42362, "moga": 41732, "mogadishu</w>": 45133, "mogul</w>": 41320, "moh": 18979, "moh</w>": 35388, "moha": 46892, "moham": 7923, "mohamed</w>": 18472, "mohammad</w>": 19926, "mohammed</w>": 16168, "mohan": 26521, "mohan</w>": 23586, "mohawk</w>": 34942, "mohd</w>": 49094, "mohsin</w>": 48861, "moi": 20691, "moi</w>": 21825, "moil</w>": 30349, "moines</w>": 32091, "moist": 19831, "moist</w>": 33263, "moisture</w>": 20412, "moisturi": 25942, "moj": 
34505, "moja": 49055, "mojito</w>": 46830, "mojo</w>": 25204, "mok": 49146, "mol": 4246, "mol</w>": 31582, "mold</w>": 21846, "molding</w>": 46274, "moldova</w>": 47317, "mole": 9927, "mole</w>": 23529, "molecular</w>": 19370, "molecule</w>": 39233, "molecules</w>": 35643, "molina</w>": 34201, "mollie</w>": 48203, "molly": 24368, "molly</w>": 12573, "molo": 41510, "mology</w>": 32255, "molten</w>": 46071, "moly</w>": 47083, "mom": 1614, "mom</w>": 2543, "moma</w>": 33605, "mombasa</w>": 40340, "moment": 12197, "moment</w>": 2495, "momento</w>": 30078, "moments</w>": 5251, "momentum</w>": 15722, "momlife</w>": 43825, "momma</w>": 14508, "mommy</w>": 12456, "momo": 48490, "momo</w>": 25980, "moms": 28446, "moms</w>": 10042, "momsdemand</w>": 33744, "mon": 749, "mon</w>": 2173, "mona</w>": 19143, "monaco</w>": 14938, "monaghan</w>": 39797, "monarch": 27235, "monarch</w>": 22619, "monarchs</w>": 36750, "monarchy</w>": 47503, "monaster": 19422, "monastery</w>": 21850, "monc": 34847, "moncton</w>": 44962, "mond": 14522, "mond</w>": 4475, "monday": 6205, "monday</w>": 2098, "mondaymorning</w>": 40089, "mondaymotiv": 45488, "mondaymotivation</w>": 8198, "mondaymotivaton</w>": 47034, "mondays</w>": 13815, "monde</w>": 29339, "mondo</w>": 36207, "monds</w>": 20317, "mone</w>": 25990, "monet</w>": 24499, "monetary</w>": 26394, "moneti": 38056, "money": 12743, "money</w>": 2327, "mong</w>": 43566, "monger</w>": 38928, "mongers</w>": 27670, "mongo": 20680, "mongolia</w>": 27144, "mongolian</w>": 46335, "moni": 46851, "monia</w>": 31161, "monic": 30893, "monica</w>": 13540, "monit": 9014, "monitor</w>": 10198, "monitored</w>": 45828, "monitoring</w>": 11030, "monitors</w>": 30478, "monk": 30557, "monk</w>": 16424, "monkey": 29597, "monkey</w>": 9465, "monkeys</w>": 15781, "monks</w>": 29090, "monmouth</w>": 36929, "mono": 8220, "mono</w>": 22537, "monochrome</w>": 25576, "monogram</w>": 39665, "monologue</w>": 47776, "monopoly</w>": 25241, "monoxide</w>": 49314, "monro": 45750, "monroe</w>": 13625, "mons</w>": 19885, "monsanto</w>": 37592, "monsi": 46677, "monsieur</w>": 48879, "monsoon</w>": 18872, "monsta</w>": 30718, "monstax</w>": 45631, "monste": 47045, "monster": 14454, "monster</w>": 6060, "monsters</w>": 11546, "mont": 5186, "mont</w>": 5382, "montag": 37202, "montage</w>": 32325, "montal": 42126, "montan": 28405, "montana</w>": 11436, "monte": 8711, "monte</w>": 14667, "montene": 28538, "montenegro</w>": 30378, "monter": 36673, "monterey</w>": 23388, "monterrey</w>": 45254, "montess": 43205, "montessori</w>": 45443, "montgom": 13852, "montgomery</w>": 14951, "month": 7680, "month</w>": 1924, "monthly</w>": 8764, "months</w>": 3109, "monthsary</w>": 42420, "monton</w>": 41961, "montp": 39523, "montre": 8434, "montreal</w>": 9262, "montrose</w>": 42347, "monty": 43997, "monty</w>": 24038, "monu": 9748, "monument</w>": 12019, "monumental</w>": 31297, "monuments</w>": 26916, "mony</w>": 4117, "monza</w>": 40380, "moo": 4953, "moo</w>": 24626, "mood": 42358, "mood</w>": 5394, "moods</w>": 43727, "moody</w>": 17170, "moom": 36887, "moon": 6334, "moon</w>": 3293, "mooney</w>": 37942, "moonlight</w>": 20001, "moons</w>": 29887, "moonshine</w>": 46706, "moor": 14817, "moor</w>": 11877, "moore": 28613, "moore</w>": 6708, "moors</w>": 32577, "moose": 37562, "moose</w>": 17338, "moot</w>": 46895, "mop</w>": 33900, "mopar</w>": 41166, "mor": 657, "mor</w>": 18614, "mora</w>": 29262, "moral</w>": 11246, "morale</w>": 39404, "morales</w>": 27117, "morality</w>": 34133, "morally</w>": 42519, "morals</w>": 
46223, "moran</w>": 21557, "moray</w>": 44569, "more": 5434, "more</w>": 750, "morecam": 37305, "morecambe</w>": 43414, "mored</w>": 20195, "moreland</w>": 44135, "moreno</w>": 24826, "morethan": 30889, "morg": 34284, "morgan": 15432, "morgan</w>": 6075, "morgen</w>": 35106, "mori": 25710, "mori</w>": 29514, "moris": 43131, "moritz</w>": 45594, "morley</w>": 40439, "mormon</w>": 27715, "morn</w>": 22393, "mornin</w>": 28327, "morning": 10769, "morning</w>": 1119, "mornings</w>": 12106, "moro</w>": 31613, "moroc": 11996, "moroccan</w>": 27546, "morocco</w>": 15228, "moron</w>": 31875, "morons</w>": 46477, "morow</w>": 40779, "morph": 23915, "morph</w>": 41700, "morphe": 38978, "morpho": 38622, "morrha": 43044, "morri": 9876, "morris": 22560, "morris</w>": 9090, "morrison": 40961, "morrison</w>": 14094, "morrisons</w>": 40965, "morrissey</w>": 30040, "morro</w>": 48363, "morrow</w>": 21611, "mors</w>": 13064, "morse</w>": 25282, "mort": 24257, "mort</w>": 30583, "mortal": 31883, "mortal</w>": 14680, "mortality</w>": 20347, "mortar</w>": 27258, "mortg": 12069, "mortgage</w>": 13988, "mortgages</w>": 45391, "mortimer</w>": 47836, "morton</w>": 20698, "morty</w>": 37391, "mory</w>": 22633, "mos": 28658, "mos</w>": 9593, "mosa": 14164, "mosa</w>": 23809, "mosaic</w>": 17506, "mosch": 47003, "mosco": 9840, "moscow</w>": 10371, "moseley</w>": 47080, "moses</w>": 18451, "mosley</w>": 46228, "mosqu": 15215, "mosque</w>": 12694, "mosques</w>": 41214, "mosquit": 39699, "mosquito</w>": 25083, "mosquitoes</w>": 41870, "moss": 25107, "moss</w>": 12815, "most": 7034, "most</w>": 1096, "mostly</w>": 8829, "mosul</w>": 29165, "mot": 16352, "mot</w>": 15452, "mota</w>": 42499, "motd</w>": 46232, "motel</w>": 26191, "moth": 33208, "moth</w>": 11736, "mother": 7455, "mother</w>": 3050, "motherhood</w>": 32274, "motherland</w>": 46774, "mothers</w>": 10546, "mothersday</w>": 15583, "motherwell</w>": 48104, "moths</w>": 29086, "moti": 38210, "motif</w>": 35373, "motion": 32139, "motion</w>": 7860, "motiv": 3183, "motivate</w>": 26771, "motivated</w>": 16521, "motivates</w>": 44684, "motivating</w>": 37720, "motivation": 26117, "motivation</w>": 4193, "motivational": 32832, "motivational</w>": 20472, "motivationmonday</w>": 28703, "motive</w>": 36669, "motley</w>": 42553, "motm</w>": 41192, "moto": 10646, "moto</w>": 11431, "motocross</w>": 34562, "motogp</w>": 16615, "motor": 3975, "motor</w>": 7659, "motorbike</w>": 33341, "motorcycle</w>": 10297, "motorcycles</w>": 24869, "motoring</w>": 44491, "motorists</w>": 32766, "motorola</w>": 33738, "motors</w>": 14989, "motorsport</w>": 18371, "motorsports</w>": 24264, "motorway</w>": 31808, "motown</w>": 32685, "mott": 44570, "mott</w>": 21708, "motto</w>": 23338, "mou": 2809, "mou</w>": 25289, "moud</w>": 37698, "moul": 25725, "mould</w>": 36743, "moulin</w>": 47656, "moun": 2023, "mound</w>": 21414, "mount": 20553, "mount</w>": 5532, "mountain": 14547, "mountain</w>": 3965, "mountaine": 24841, "mountaineer</w>": 49255, "mountains</w>": 5873, "mounted</w>": 17897, "mounting</w>": 29910, "mounts</w>": 36767, "mour": 9053, "mour</w>": 42446, "moured</w>": 29555, "mourinho</w>": 18536, "mourn</w>": 33592, "mourning</w>": 24169, "mourns</w>": 42811, "mous": 24837, "mous</w>": 17425, "mouse": 33032, "mouse</w>": 9301, "mousse</w>": 31869, "moustache</w>": 32795, "mouth": 15152, "mouth</w>": 4932, "mouths</w>": 38518, "mov</w>": 23950, "move": 16624, "move</w>": 2783, "moved</w>": 6997, "movember</w>": 23474, "movement</w>": 5208, "movements</w>": 19665, "mover</w>": 
37673, "movers</w>": 33957, "moves</w>": 6880, "movi": 1707, "movic</w>": 43838, "movie": 11247, "movie</w>": 2016, "movies</w>": 4772, "moving": 32160, "moving</w>": 3584, "mow": 31006, "mow</w>": 36329, "mower</w>": 30895, "mowing</w>": 46424, "mowx</w>": 44263, "moy": 27276, "moy</w>": 34205, "moyes</w>": 37119, "moz": 14761, "moz</w>": 43738, "mozam": 26648, "mozambique</w>": 28831, "mozart</w>": 22132, "mozz": 26317, "mozzarella</w>": 27845, "mp": 1037, "mp</w>": 1246, "mpa</w>": 30749, "mpc</w>": 38560, "mpd</w>": 33814, "mped</w>": 28134, "mper</w>": 22803, "mpg": 39830, "mpg</w>": 37454, "mpgvip</w>": 42149, "mph</w>": 5306, "mpi</w>": 43263, "mping</w>": 27999, "mple</w>": 21139, "mplo": 47071, "mpls</w>": 34298, "mpo</w>": 33674, "mpp</w>": 39570, "mps</w>": 5504, "mption</w>": 9717, "mpton</w>": 27448, "mpu": 47156, "mpus</w>": 25864, "mpy</w>": 17192, "mq": 19103, "mqm</w>": 24687, "mr": 3139, "mr</w>": 1982, "mra": 44568, "mrc</w>": 25897, "mri</w>": 24773, "mrs": 25003, "mrs</w>": 4255, "mrt</w>": 30256, "mru</w>": 22370, "mrw</w>": 15303, "ms": 3525, "ms</w>": 988, "msa</w>": 36306, "msc": 31826, "msc</w>": 20529, "msd": 25804, "msd</w>": 36407, "msdhoni</w>": 32850, "msf</w>": 36239, "msg": 44430, "msg</w>": 10928, "msh": 41751, "msi": 43597, "msi</w>": 45278, "msk": 38501, "msl</w>": 42736, "msm</w>": 22210, "msn": 18824, "msn</w>": 41042, "msnbc</w>": 20245, "mson": 27773, "mson</w>": 12298, "msp": 41445, "msp</w>": 22318, "mss": 42136, "mss</w>": 48610, "mst</w>": 26335, "msu": 26763, "msu</w>": 17298, "mswx</w>": 42957, "msy</w>": 43919, "mt": 4252, "mt</w>": 3284, "mta</w>": 28691, "mtb": 48306, "mtb</w>": 18747, "mtc</w>": 42482, "mtg": 49142, "mtg</w>": 13648, "mth</w>": 48151, "mtl</w>": 22135, "mtn": 26041, "mtn</w>": 18953, "mtr</w>": 46650, "mts</w>": 38751, "mtv": 8099, "mtv</w>": 12555, "mtvbr": 47258, "mtvhottest</w>": 16751, "mtvstars</w>": 19948, "mu": 670, "mu</w>": 6411, "mua</w>": 21395, "muay": 44910, "muaythai</w>": 47763, "mubarak</w>": 17957, "muc</w>": 49115, "much": 14300, "much</w>": 1238, "mucha</w>": 42191, "muchas</w>": 26278, "mucho</w>": 19864, "muck": 44731, "muck</w>": 45330, "mud": 17491, "mud</w>": 11673, "mudder</w>": 49104, "muddy</w>": 21524, "mue": 44383, "mue</w>": 40717, "mueller": 46863, "mueller</w>": 14719, "muen": 48646, "muer": 33840, "muf": 33852, "mufc</w>": 9013, "muffin</w>": 22696, "muffins</w>": 25922, "mufti</w>": 44930, "mug": 16339, "mug</w>": 9722, "mugabe</w>": 36441, "mughal</w>": 37508, "mugs</w>": 22852, "mugshot</w>": 40028, "muh": 36335, "muh</w>": 46475, "muham": 10043, "muhammad</w>": 12259, "muir": 44650, "muir</w>": 24745, "muj": 44635, "muk": 17327, "muk</w>": 32600, "mukher": 34575, "mukherjee</w>": 37862, "mul": 1899, "mul</w>": 43193, "mula</w>": 40937, "mulator</w>": 17463, "mulberry</w>": 39221, "mule</w>": 28695, "mull": 17313, "mull</w>": 35310, "mulled</w>": 44641, "mullen</w>": 30797, "muller</w>": 33956, "mullet</w>": 35010, "mulligan</w>": 44336, "mullins</w>": 41265, "mult": 34219, "multi": 3947, "multi</w>": 6400, "multic": 21683, "multicul": 28004, "multicultural</w>": 34667, "multil": 27975, "multimedia</w>": 27977, "multin": 38996, "multinational</w>": 46540, "multip": 40314, "multiplayer</w>": 27460, "multiple</w>": 6470, "multipurpose</w>": 47665, "multit": 27814, "multitasking</w>": 48684, "mulus</w>": 26180, "mum": 15565, "mum</w>": 4030, "mumb": 5850, "mumbai": 24279, "mumbai</w>": 6971, "mumford</w>": 46184, "mummy</w>": 16301, "mums</w>": 17868, "mun": 2617, "mun</w>": 21059, 
"muna</w>": 48424, "munch": 23587, "munch</w>": 33299, "munchies</w>": 44324, "munchkin</w>": 41305, "mund</w>": 14244, "mundo</w>": 20990, "muni": 27327, "muni</w>": 39795, "munich</w>": 13526, "munici": 12159, "municipal": 43667, "municipal</w>": 16600, "municipality</w>": 29987, "munition</w>": 32668, "munro</w>": 36501, "munster</w>": 27201, "mup</w>": 21966, "muppet</w>": 40598, "muppets</w>": 40187, "mups</w>": 42195, "mur": 2144, "mur</w>": 18293, "mura": 45176, "mural</w>": 12315, "murals</w>": 31499, "murder": 28136, "murder</w>": 5787, "murdered</w>": 13158, "murderer</w>": 26956, "murderers</w>": 48472, "murdering</w>": 36055, "murders</w>": 22409, "murdoch</w>": 29037, "murphy": 48976, "murphy</w>": 8914, "murray": 31978, "murray</w>": 7513, "murs</w>": 38783, "mus": 2198, "mus</w>": 8103, "musa</w>": 30540, "musc": 5696, "muscat</w>": 33322, "muscle": 27323, "muscle</w>": 9269, "muscles</w>": 16786, "muscular</w>": 30606, "muse": 2369, "muse</w>": 15686, "museo</w>": 36457, "muses</w>": 48243, "museu": 27087, "museum": 15602, "museum</w>": 2786, "museums</w>": 15542, "museumweek</w>": 37996, "mush": 7635, "mushroom</w>": 13011, "mushrooms</w>": 14730, "musi": 15628, "music": 4110, "music</w>": 1179, "musica</w>": 26668, "musical": 36002, "musical</w>": 5173, "musically</w>": 48893, "musicals</w>": 36974, "musichistory</w>": 37890, "musician</w>": 11179, "musicians</w>": 12498, "musicislife</w>": 43311, "musicmonday</w>": 35887, "musicvideo</w>": 26764, "musik</w>": 32986, "musings</w>": 44961, "musique</w>": 42250, "musk": 32143, "musk</w>": 19063, "muskete": 32775, "musketeers</w>": 37993, "musko": 34987, "muskoka</w>": 40832, "musli": 4958, "muslim": 43795, "muslim</w>": 7060, "muslims</w>": 10513, "muss": 41493, "mussels</w>": 33393, "must": 6783, "must</w>": 2048, "mustache</w>": 23451, "mustaf": 23596, "mustafa</w>": 29000, "mustang": 42361, "mustang</w>": 13309, "mustangs</w>": 22500, "mustard</w>": 15794, "muster</w>": 47361, "mustread</w>": 28978, "mut": 12598, "mut</w>": 22839, "mutant</w>": 28384, "mutation</w>": 38626, "mutations</w>": 39651, "mute</w>": 31252, "muted</w>": 48028, "muth</w>": 34280, "mutil": 39950, "mutt</w>": 45924, "mutu": 17574, "mutual</w>": 15055, "mutuals</w>": 31158, "muy</w>": 44625, "mv": 10580, "mv</w>": 8269, "mvc</w>": 40549, "mvp</w>": 8905, "mw": 16725, "mw</w>": 11206, "mwc</w>": 24289, "mwf</w>": 48565, "mx": 21947, "mx</w>": 9575, "my": 1152, "my</w>": 607, "mya</w>": 31401, "myal": 42735, "myan": 13761, "myanmar</w>": 14764, "myart</w>": 38826, "myco": 48362, "mydayin": 41896, "mydayinla</w>": 42801, "mydubai</w>": 43475, "mye": 27551, "myel": 40084, "myers</w>": 15993, "myjaps</w>": 47939, "myle": 43700, "myles</w>": 25511, "mylife</w>": 30537, "mylittle": 37757, "mylittlepony</w>": 45107, "myo": 16206, "myr": 20272, "myra</w>": 35694, "myri": 34972, "myrt": 47785, "myrtle</w>": 27768, "mys</w>": 11724, "myself</w>": 3245, "mysore</w>": 44924, "myspace</w>": 41382, "myster": 4669, "mysteries</w>": 20605, "mysterious</w>": 12650, "mystery": 39828, "mystery</w>": 6711, "mysti": 28711, "mystic": 36264, "mystic</w>": 23722, "mystical</w>": 34122, "myth": 20322, "myth</w>": 13878, "mythical</w>": 34377, "mytho": 43857, "mythology</w>": 22496, "myths</w>": 18675, "mz": 29509, "mz</w>": 33400, "mzan": 36322, "mzansi</w>": 43301, "má": 36842, "mé": 21890, "méxico</w>": 46159, "mü": 28142, "mün": 41235, "n": 77, "n</w>": 333, "na": 1097, "na</w>": 1272, "naa</w>": 37738, "naacp</w>": 32176, "nab": 6951, "nab</w>": 19440, "nabe": 35111, 
"naby</w>": 24800, "nac": 14557, "nac</w>": 18950, "nach": 12168, "nach</w>": 43622, "nacho</w>": 35647, "nachos</w>": 32847, "nacht</w>": 37261, "nacional</w>": 38782, "nad": 6204, "nad</w>": 43928, "nada</w>": 31683, "nadal</w>": 20814, "nade": 24908, "nadi": 30512, "nadia</w>": 27487, "nadine</w>": 23356, "nadu</w>": 20936, "nae</w>": 19374, "naf": 16161, "naf</w>": 45956, "nafta</w>": 43123, "nag": 6694, "nag</w>": 23902, "naga": 45953, "naga</w>": 38997, "nagar</w>": 17490, "nage</w>": 41219, "nago": 38349, "nagoya</w>": 43303, "nagpur</w>": 43328, "nah": 26421, "nah</w>": 11129, "nahi</w>": 35244, "nai": 6230, "nai</w>": 10692, "naia</w>": 31340, "naidu</w>": 42429, "naija</w>": 16326, "naik</w>": 34424, "nail": 19459, "nail</w>": 9059, "nailart</w>": 43532, "nailed</w>": 19035, "nails</w>": 8469, "nair</w>": 27107, "naira</w>": 39450, "naire</w>": 48892, "nairobi</w>": 17756, "nais": 46396, "naissance</w>": 44761, "naive</w>": 43362, "naj": 30985, "naji": 32589, "nak": 9248, "nak</w>": 25550, "naked": 46371, "naked</w>": 11478, "naku": 39864, "nal": 14132, "nal</w>": 3119, "nale</w>": 27198, "nall</w>": 32869, "nally</w>": 26158, "nam": 1410, "nam</w>": 12344, "nama</w>": 39586, "naman</w>": 27635, "namaste</w>": 35549, "name": 18160, "name</w>": 1981, "named</w>": 3194, "nameis": 40831, "nament</w>": 3916, "naments</w>": 16540, "names</w>": 6130, "namesake</w>": 41298, "nami": 20393, "namibia</w>": 23731, "naming</w>": 19367, "namjoon</w>": 31986, "namm</w>": 35524, "namo": 46013, "namo</w>": 24854, "nan": 4375, "nan</w>": 7750, "nana</w>": 18761, "nanaimo</w>": 40518, "nancy": 21511, "nancy</w>": 11425, "nand": 20435, "nandez</w>": 12764, "nando": 46044, "nang": 48148, "nani</w>": 27980, "nanny</w>": 31104, "nano": 15835, "nano</w>": 22006, "nanop": 34177, "nanotechnology</w>": 42235, "nanow": 46734, "nant</w>": 22526, "nantes</w>": 47533, "nantucket</w>": 41573, "nao</w>": 39319, "naom": 34955, "naomi</w>": 20173, "nap": 6568, "nap</w>": 11012, "napa</w>": 20545, "napier</w>": 40875, "napkin</w>": 38930, "naples</w>": 23560, "napo": 18715, "napol": 20122, "napoleon</w>": 24969, "napoli</w>": 22445, "napp": 11359, "napping</w>": 37657, "naps</w>": 31317, "naq": 46453, "nar": 2977, "nar</w>": 20145, "nara</w>": 33823, "narcis": 25229, "narcissi": 35442, "narco": 38461, "nard</w>": 18216, "nare": 34853, "naren": 8468, "narendr": 9807, "narendra</w>": 25848, "narendramodi</w>": 9853, "narnia</w>": 48693, "narr": 11845, "narrated</w>": 43609, "narrative</w>": 15933, "narratives</w>": 35117, "narrator</w>": 46529, "narrow": 24006, "narrow</w>": 16652, "narrowly</w>": 29747, "naruto</w>": 22732, "nas": 3090, "nas</w>": 15250, "nasa</w>": 6841, "nasal</w>": 42853, "nascar": 25723, "nascar</w>": 7868, "nasdaq</w>": 26629, "nash": 6771, "nash</w>": 13620, "nasheed</w>": 49176, "nashgrier</w>": 33372, "nashville": 45356, "nashville</w>": 8585, "nasi</w>": 47987, "nasir</w>": 47509, "nassau</w>": 34048, "nasser</w>": 43559, "nasty": 32930, "nasty</w>": 8709, "nat": 1276, "nat</w>": 11310, "nata</w>": 39392, "natal</w>": 28516, "natali": 20296, "natalia</w>": 32978, "natalie": 36634, "natalie</w>": 13595, "natash": 48701, "natasha</w>": 23093, "nate": 26643, "nate</w>": 7587, "natgeo": 33009, "natgeo</w>": 25046, "nath": 22203, "nath</w>": 19843, "nathan": 13028, "nathan</w>": 9711, "nathanfillion</w>": 47422, "nathaniel</w>": 32667, "nati": 1060, "nati</w>": 13384, "natic</w>": 44944, "natin</w>": 44358, "nation": 2317, "nation</w>": 2670, "national": 3126, "national</w>": 1362, 
"nationalbestfriend": 42222, "nationaldogday</w>": 32227, "nationalism</w>": 29867, "nationalist</w>": 25058, "nationality</w>": 44451, "nationally</w>": 15130, "nationalpark</w>": 33060, "nationalparks</w>": 41204, "nationals</w>": 10784, "nationaltrust</w>": 34051, "nations</w>": 7654, "nationwide</w>": 13795, "native": 20639, "native</w>": 4562, "natives</w>": 36060, "nativity</w>": 33988, "natl": 39225, "natl</w>": 34465, "nato</w>": 13139, "nats</w>": 21106, "natu": 2775, "natur": 6800, "natural": 13198, "natural</w>": 3288, "naturally</w>": 12995, "naturals</w>": 44686, "nature": 9382, "nature</w>": 2625, "naturelovers</w>": 41514, "naturephotography</w>": 22533, "natures</w>": 15616, "natureuk</w>": 46193, "nau": 5955, "nau</w>": 32878, "naught</w>": 41001, "naughty": 47255, "naughty</w>": 15101, "nautical</w>": 31660, "nav": 3413, "nav</w>": 25308, "navajo</w>": 35523, "naval": 44725, "naval</w>": 13273, "navar": 24848, "navarro</w>": 37104, "nave": 42704, "naveen</w>": 43837, "naver</w>": 32534, "navi": 16159, "navi</w>": 44848, "navig": 12507, "navigate</w>": 24400, "navigating</w>": 33134, "navigation</w>": 20148, "navigator</w>": 38910, "navis</w>": 36377, "navratri</w>": 45428, "navy": 28414, "navy</w>": 5598, "naw": 16259, "naw</w>": 30500, "nawaz": 49161, "nawaz</w>": 19523, "nax": 38299, "nay": 11704, "nay</w>": 16182, "naya</w>": 38917, "nayanth": 38157, "nayanthara</w>": 45184, "naz": 6363, "naz</w>": 35534, "nazi</w>": 12972, "nazis</w>": 21778, "nb": 6459, "nb</w>": 6813, "nba": 22524, "nba</w>": 5139, "nbad": 43458, "nbaf": 30127, "nbafinals</w>": 33803, "nbap": 41956, "nbaplayoffs</w>": 43860, "nbat": 46291, "nbc": 9352, "nbc</w>": 8799, "nbd</w>": 24526, "nbl</w>": 42652, "nc": 5021, "nc</w>": 4911, "nca": 6921, "ncaa</w>": 9418, "ncbd</w>": 47221, "ncc": 33195, "ncc</w>": 36686, "ncds</w>": 47573, "ncfc</w>": 31274, "ncis</w>": 33617, "ncpol</w>": 40562, "ncr</w>": 38474, "ncs</w>": 42689, "nct": 27723, "nct</w>": 20319, "ncwx</w>": 36166, "nd": 5625, "nd</w>": 1764, "nda</w>": 32862, "ndc</w>": 47564, "ndi</w>": 48229, "ndp</w>": 19257, "nds</w>": 31347, "ndtv</w>": 26261, "ne": 557, "ne</w>": 1422, "nea</w>": 24068, "neal": 33652, "neal</w>": 16730, "near": 11296, "near</w>": 2252, "nearby</w>": 13314, "nearest</w>": 18985, "nearing</w>": 26571, "nearly</w>": 4816, "nears</w>": 37710, "neat": 43201, "neat</w>": 15465, "neath</w>": 18315, "neau</w>": 31559, "neb": 40209, "nebra": 13371, "nebraska</w>": 14565, "nebu": 49295, "nebula</w>": 22532, "nec": 25109, "nec</w>": 22992, "necess": 6961, "necessarily</w>": 25853, "necessary</w>": 8955, "necessities</w>": 43483, "necessity</w>": 33163, "neck": 6066, "neck</w>": 6906, "necklace</w>": 7385, "necklaces</w>": 32276, "necks</w>": 29701, "nectar</w>": 33683, "ned": 16030, "ned</w>": 1369, "nederland</w>": 49058, "nee": 20494, "nee</w>": 10601, "need": 3229, "need</w>": 1262, "needed</w>": 4049, "needing</w>": 22894, "needle": 44490, "needle</w>": 19886, "needles</w>": 27250, "needless</w>": 39984, "needs</w>": 2536, "needy</w>": 30150, "neel": 33092, "neel</w>": 46043, "neer": 34245, "nees</w>": 47248, "neet</w>": 46362, "neg": 5513, "negan</w>": 42623, "negative</w>": 8869, "negatively</w>": 40254, "negativity</w>": 34658, "neglec": 18827, "neglect</w>": 33680, "neglected</w>": 31893, "negli": 32594, "negligence</w>": 45658, "negoti": 10216, "negotiate</w>": 32969, "negotiating</w>": 35510, "negotiation</w>": 36504, "negotiations</w>": 20433, "negr": 42190, "negro</w>": 26554, "neh": 40416, "neh</w>": 41697, 
"neha</w>": 44463, "nehru</w>": 30316, "nei": 9366, "neigh": 4061, "neighb": 6534, "neighbor": 7759, "neighbor</w>": 14485, "neighborhood</w>": 9471, "neighborhoods</w>": 26713, "neighboring</w>": 44754, "neighbors</w>": 13037, "neighbour": 15858, "neighbour</w>": 23719, "neighbourhood</w>": 20312, "neighbours</w>": 17594, "neil": 13591, "neil</w>": 8030, "neilhimself</w>": 45682, "neill</w>": 19324, "neither</w>": 14398, "nek</w>": 47727, "neko</w>": 47066, "nel": 5476, "nel</w>": 2693, "nell": 27081, "nell</w>": 8117, "nelly</w>": 21166, "nels</w>": 19296, "nelson": 24774, "nelson</w>": 8586, "nem</w>": 45153, "neman</w>": 48553, "neme": 30993, "nemesis</w>": 37811, "nemo</w>": 30441, "nen": 17817, "nen</w>": 15451, "nene</w>": 44167, "neo": 14562, "neo</w>": 11017, "neon": 21043, "neon</w>": 13919, "neonatal</w>": 46464, "neop": 49069, "nep": 20739, "nep</w>": 41960, "nepal": 25597, "nepal</w>": 10066, "nepali</w>": 47579, "neph": 27926, "nephe": 41810, "nephew</w>": 11689, "nephews</w>": 43747, "nephro": 43054, "neptune</w>": 30566, "ner": 2064, "ner</w>": 998, "nerd": 24452, "nerd</w>": 12273, "nerds</w>": 22609, "nerdy</w>": 33124, "nered</w>": 17583, "nerf</w>": 42914, "nering</w>": 20226, "nero</w>": 29048, "ners</w>": 2129, "nerve</w>": 18571, "nerves</w>": 27813, "nervous</w>": 13928, "nery</w>": 48597, "nes": 5457, "nes</w>": 4980, "nesburg</w>": 27159, "nese</w>": 32220, "ness": 7187, "ness</w>": 1294, "nesses</w>": 20107, "nessy</w>": 32939, "nest": 20302, "nest</w>": 8719, "nesting</w>": 28860, "nestle</w>": 43967, "nestled</w>": 38107, "nests</w>": 41133, "net": 1851, "net</w>": 2315, "netany": 23137, "netanyahu</w>": 23583, "netball</w>": 19761, "netes</w>": 44335, "netfli": 6304, "netflix": 35325, "netflix</w>": 6600, "nether": 9946, "netherlands</w>": 11060, "neti": 43980, "netneutrality</w>": 47794, "nets</w>": 8582, "nett": 23403, "nett</w>": 6975, "nette</w>": 13271, "network": 23285, "network</w>": 3304, "networking</w>": 9818, "networks</w>": 10004, "neu": 3855, "neu</w>": 43342, "neue</w>": 45764, "neur": 19001, "neur</w>": 31976, "neural</w>": 26388, "neuro": 7401, "neuro</w>": 36000, "neurological</w>": 41718, "neurology</w>": 43197, "neurons</w>": 40442, "neuroscience</w>": 23381, "neutr": 17207, "neutral</w>": 17011, "neutrality</w>": 26511, "neutron</w>": 44056, "nev": 10236, "nev</w>": 43645, "neva</w>": 43304, "nevada</w>": 13499, "neve": 44099, "neve</w>": 44023, "never": 6746, "never</w>": 1426, "neveragain</w>": 45053, "neverforget</w>": 19242, "nevergiveup</w>": 42497, "neverland</w>": 41483, "nevertheless</w>": 48355, "nevertrump</w>": 47494, "neville</w>": 19269, "nevis</w>": 43670, "new": 1218, "new</w>": 686, "newark</w>": 20240, "newbie</w>": 45427, "newborn</w>": 18320, "newbury</w>": 34169, "newcastle": 41955, "newcastle</w>": 9302, "newcomer</w>": 30648, "newcomers</w>": 44037, "newe": 40068, "newell</w>": 41436, "newer</w>": 33099, "newest</w>": 4990, "newfound": 25250, "newfoundland</w>": 28079, "newh": 18546, "newin": 31911, "newjersey</w>": 32621, "newly": 42186, "newly</w>": 7056, "newman</w>": 15815, "newmarket</w>": 38617, "newmexico</w>": 35238, "newmusic": 32510, "newmusic</w>": 17201, "newor": 25969, "neworleans</w>": 31205, "newport": 42580, "newport</w>": 14846, "newprofile": 14633, "newprofilepic</w>": 14754, "newrelease</w>": 34793, "news": 6216, "news</w>": 1120, "newsat</w>": 43979, "newsc": 28656, "newscast</w>": 45031, "newsle": 10727, "newsletter</w>": 11069, "newsnow</w>": 48650, "newsp": 7109, "newspaper</w>": 8786, 
"newspapers</w>": 22423, "newsroom</w>": 23200, "newt</w>": 37224, "newton": 33122, "newton</w>": 12606, "newtown</w>": 31747, "newyear": 22161, "newyear</w>": 12999, "newyearseve</w>": 37587, "newyork": 18140, "newyork</w>": 10454, "newyorkcity</w>": 30460, "newyorker</w>": 39732, "newzealand</w>": 21117, "nex": 6897, "nex</w>": 39720, "next": 12434, "next</w>": 1131, "nextgen</w>": 41933, "nexus</w>": 19053, "ney": 3857, "ney</w>": 1438, "neymar</w>": 21878, "neys</w>": 12616, "nez</w>": 27388, "nf": 15195, "nf</w>": 25643, "nfamily</w>": 20098, "nfc</w>": 23695, "nffc</w>": 27893, "nfl": 11219, "nfl</w>": 4691, "nfldraft</w>": 25002, "ng": 10352, "ng</w>": 5215, "nga</w>": 35477, "ngc</w>": 29046, "ngo": 38740, "ngo</w>": 24821, "ngos</w>": 34627, "nguyen</w>": 29947, "nh": 3760, "nh</w>": 10803, "nhc</w>": 44817, "nhl": 12290, "nhl</w>": 8167, "nhlbruins</w>": 39081, "nhljets</w>": 49357, "nhm</w>": 39483, "nhpolitics</w>": 36125, "nhq</w>": 42368, "nhra</w>": 30052, "nhs": 23282, "nhs</w>": 7695, "ni": 697, "ni</w>": 3256, "nia</w>": 3098, "niag": 18071, "niagar": 39298, "niagara</w>": 18965, "niall": 41354, "niall</w>": 8327, "niallo": 22855, "niallofficial</w>": 23084, "niam": 39347, "nian</w>": 46003, "nib</w>": 31049, "nic": 2109, "nic</w>": 6651, "nica</w>": 29040, "nicar": 25119, "nicaragua</w>": 28423, "nice": 28386, "nice</w>": 1805, "nicely</w>": 12303, "nicer</w>": 29488, "nicest</w>": 22967, "niche</w>": 25279, "nichol": 7668, "nicholas": 39814, "nicholas</w>": 13148, "nicholls</w>": 38846, "nichols</w>": 22730, "nicholson</w>": 28745, "nick": 4209, "nick</w>": 4253, "nickel</w>": 22034, "nickelo": 28668, "nickelodeon</w>": 33279, "nicki</w>": 17738, "nickimin": 27390, "nickiminaj</w>": 27593, "nickjonas</w>": 43862, "nickname</w>": 24731, "nicknamed</w>": 45190, "nicks</w>": 15049, "nicky": 28893, "nicky</w>": 22091, "nico": 20850, "nico</w>": 17779, "nicol": 9919, "nicol</w>": 48274, "nicola</w>": 21791, "nicolas": 43813, "nicolas</w>": 18918, "nicole": 21246, "nicole</w>": 10000, "nicot": 45099, "nicotine</w>": 46697, "nie": 9524, "nie</w>": 3501, "niece</w>": 12795, "nieces</w>": 44877, "niel": 19109, "niel</w>": 26837, "niels</w>": 37154, "nielsen</w>": 28372, "nier</w>": 13014, "nies</w>": 10586, "niest</w>": 15007, "nieu": 29781, "nific": 4748, "nifty</w>": 25604, "nig": 27933, "nig</w>": 28099, "nigan</w>": 48516, "nigel": 33919, "nigel</w>": 15153, "niger": 4524, "niger</w>": 29920, "nigeri": 40913, "nigeria</w>": 6106, "nigerian</w>": 12167, "nigerians</w>": 25358, "nigh": 13525, "nigh</w>": 48157, "night": 3870, "night</w>": 930, "nightclub</w>": 20418, "nighter</w>": 41349, "nighting": 36211, "nightingale</w>": 40696, "nightlife</w>": 28823, "nightly</w>": 28868, "nightmare</w>": 12867, "nightmares</w>": 24032, "nightout</w>": 44257, "nights</w>": 4296, "nighttime</w>": 38147, "nightw": 39956, "nih</w>": 25783, "nik": 5126, "nik</w>": 13705, "nike": 16300, "nike</w>": 5783, "nikeplus</w>": 43154, "niki</w>": 36136, "nikita</w>": 37118, "nikk": 38596, "nikki": 23156, "nikki</w>": 16689, "niko</w>": 43771, "nikol": 27430, "nikola</w>": 42146, "nikon": 25488, "nikon</w>": 13849, "nikov</w>": 43960, "nil": 16852, "nil</w>": 35030, "nile</w>": 24252, "nim": 30402, "nim</w>": 42093, "nima</w>": 42586, "nin": 5794, "nin</w>": 14145, "nina</w>": 13891, "nine": 16213, "nine</w>": 7330, "ninety</w>": 48214, "ning": 6050, "ning</w>": 762, "ningham</w>": 23395, "ningly</w>": 43537, "nings</w>": 4588, "nington</w>": 26214, "ninj": 23225, "ninja</w>": 11969, "ninjas</w>": 
42796, "nino</w>": 25633, "ninten": 6184, "nintendo": 13969, "nintendo</w>": 7886, "nintendoswitch</w>": 16404, "ninth</w>": 22770, "nip": 33889, "nip</w>": 22333, "nipp": 24634, "nipple</w>": 45987, "nipples</w>": 44774, "nippon</w>": 47960, "nips</w>": 49241, "nir": 15503, "nir</w>": 40057, "nireland</w>": 45763, "niro</w>": 47373, "nirvana</w>": 28300, "nis": 5609, "nis</w>": 3786, "nish": 19834, "nish</w>": 13256, "nished</w>": 24141, "nishi": 32386, "nishings</w>": 49247, "nison</w>": 45700, "niss</w>": 39043, "nissan": 37635, "nissan</w>": 11082, "nist</w>": 17782, "nister</w>": 36640, "nit": 4087, "nit</w>": 19011, "nite</w>": 8427, "niti": 43964, "niti</w>": 45355, "nitin</w>": 37529, "nitro</w>": 30726, "nitrogen</w>": 30706, "niture</w>": 7840, "nity</w>": 12707, "niu</w>": 48187, "niv</w>": 47300, "niversary</w>": 29643, "nix": 48552, "nix</w>": 32278, "nixon</w>": 20671, "nj": 8343, "nj</w>": 6672, "njcaa</w>": 48992, "njpw</w>": 38992, "nk": 22708, "nk</w>": 17456, "nko": 36353, "nl": 12057, "nl</w>": 7655, "nli</w>": 37502, "nlp</w>": 35680, "nlwx</w>": 49260, "nm": 15956, "nm</w>": 11370, "nmd</w>": 43331, "nme</w>": 40454, "nmwx</w>": 47967, "nn": 8947, "nn</w>": 12925, "nnn</w>": 26277, "nnnn</w>": 41420, "no": 578, "no</w>": 871, "noaa</w>": 27557, "noah": 28806, "noah</w>": 11519, "nobel": 33742, "nobel</w>": 15605, "nobelprize</w>": 46074, "noble": 29430, "noble</w>": 12051, "nobody</w>": 7009, "noc": 16988, "noc</w>": 44420, "nocchi</w>": 46359, "noch</w>": 38672, "noche</w>": 29689, "noches</w>": 44166, "nock</w>": 16993, "noctur": 26291, "nocturnal</w>": 41738, "nod</w>": 18648, "nodapl</w>": 39079, "node": 31434, "node</w>": 24871, "nodejs</w>": 39262, "nodes</w>": 40534, "noel": 38406, "noel</w>": 17496, "nof": 29505, "noff</w>": 46979, "nofilter</w>": 16418, "nog</w>": 31157, "noh</w>": 40775, "noi": 43115, "noi</w>": 39889, "noida</w>": 33404, "noir": 39291, "noir</w>": 12953, "nois": 22057, "noise": 41018, "noise</w>": 9307, "noises</w>": 31575, "noisse": 45686, "noisy</w>": 33495, "nokia</w>": 17731, "nol": 8055, "nola</w>": 13289, "nolan</w>": 17323, "nold</w>": 40322, "nole</w>": 34654, "noles</w>": 40569, "nollywood</w>": 43145, "nology</w>": 42221, "nom": 2981, "nom</w>": 12799, "nomad</w>": 27849, "noman": 45592, "nomin": 5643, "nominate</w>": 17122, "nominated</w>": 8710, "nominating</w>": 45747, "nomination</w>": 14136, "nominations</w>": 17124, "nominee</w>": 14122, "nominees</w>": 17873, "nomnom</w>": 26962, "nomore": 35126, "noms</w>": 35706, "non": 4282, "non</w>": 3353, "none": 29644, "none</w>": 8906, "nonetheless</w>": 39675, "nonfiction</w>": 31654, "nonprofit</w>": 19315, "nonprofits</w>": 37935, "nonsense</w>": 19136, "nonstop</w>": 30300, "nont</w>": 25207, "noo": 6759, "noo</w>": 46672, "noodle</w>": 19521, "noodles</w>": 15782, "nook</w>": 30088, "noon": 37693, "noon</w>": 2347, "noor": 46978, "noor</w>": 31323, "nope</w>": 15625, "nor": 1062, "nor</w>": 6190, "nora</w>": 25890, "norcal</w>": 41970, "nord": 19261, "nord</w>": 36067, "nordic": 36439, "nordic</w>": 20734, "nordstrom</w>": 38562, "norfolk": 30232, "norfolk</w>": 12202, "norm": 10990, "norm</w>": 22457, "norma</w>": 35757, "normal": 28748, "normal</w>": 5967, "normali": 45157, "normally</w>": 15870, "norman": 22027, "norman</w>": 11338, "normandy</w>": 23840, "normani</w>": 44596, "norms</w>": 33011, "norris</w>": 21814, "norse</w>": 36559, "norte</w>": 35638, "north": 3468, "north</w>": 2188, "northampton": 49246, "northampton</w>": 26175, "northan": 37081, "northbound</w>": 
24228, "northcarolina</w>": 43386, "northe": 24675, "northeast": 42673, "northeast</w>": 13009, "northeastern</w>": 28297, "northeasthour</w>": 42869, "norther": 26908, "northern": 17210, "northern</w>": 5049, "northernlights</w>": 48940, "northkorea</w>": 38495, "northside</w>": 45957, "northumber": 22295, "northumberland</w>": 22922, "northwales</w>": 49371, "northwest</w>": 12894, "northwestern</w>": 23685, "norton</w>": 18032, "norway</w>": 8780, "norwe": 14414, "norwegian</w>": 15971, "norwich": 37629, "norwich</w>": 15812, "norwood</w>": 37889, "nos</w>": 13420, "nose": 24192, "nose</w>": 8231, "noses</w>": 48163, "nostal": 12076, "nostalgia</w>": 16622, "nostalgic</w>": 24468, "not": 2534, "not</w>": 783, "notable</w>": 22023, "notch</w>": 19476, "notdead</w>": 42059, "note": 10910, "note</w>": 3246, "notebook</w>": 16365, "notebooks</w>": 37623, "noted</w>": 22501, "notes</w>": 5795, "nothin</w>": 24291, "nothing": 28412, "nothing</w>": 2586, "noti": 10686, "notic": 6915, "notice</w>": 6683, "noticeable</w>": 40857, "noticed</w>": 9324, "notices</w>": 33459, "noticias</w>": 47759, "noticing</w>": 37571, "notification</w>": 22512, "notifications</w>": 23169, "notified</w>": 39454, "noting</w>": 38649, "notion</w>": 37856, "notjust": 33212, "notjustlakes</w>": 45803, "notmy": 39301, "noto": 29878, "noton": 48258, "notor": 21711, "notori": 44065, "notorious</w>": 22489, "notre": 24397, "notre</w>": 15306, "notredame</w>": 34077, "notsorry</w>": 34361, "nott": 9333, "nott</w>": 34989, "notte</w>": 47308, "nottingham</w>": 12852, "notts</w>": 25598, "nou": 8751, "nou</w>": 30953, "noun</w>": 33663, "nouri": 23796, "nourish</w>": 46025, "nourished</w>": 48354, "nous</w>": 29485, "nouveau</w>": 29948, "nouvel": 34215, "nov": 2264, "nov</w>": 4293, "nova</w>": 11236, "novak</w>": 26465, "novasco": 33785, "novascotia</w>": 34744, "novation</w>": 39753, "nove": 30507, "novel": 15044, "novel</w>": 6080, "novelist</w>": 27314, "novella</w>": 42770, "novels</w>": 16040, "novelty</w>": 37750, "november</w>": 3680, "nover</w>": 37465, "novi": 47957, "novice</w>": 33743, "novo": 27504, "novo</w>": 36581, "now": 2040, "now</w>": 692, "nowadays</w>": 26155, "nowhere</w>": 14108, "nowplaying</w>": 3708, "nowwatching</w>": 30852, "nox</w>": 27406, "noxi": 39304, "noxious</w>": 42833, "noy</w>": 32787, "np": 18205, "np</w>": 6314, "npa</w>": 42378, "npc</w>": 33966, "npr": 39941, "npr</w>": 24078, "nps</w>": 22025, "npt</w>": 47231, "nr": 6574, "nr</w>": 9713, "nra</w>": 17286, "nrc</w>": 45786, "nrf</w>": 47982, "nrg</w>": 48662, "nrl": 27142, "nrl</w>": 18127, "ns": 12405, "ns</w>": 1373, "nsa</w>": 23004, "nsc": 32792, "nsd</w>": 36659, "nsf</w>": 34180, "nsfw</w>": 19847, "nsi": 47824, "nsw": 21301, "nsw</w>": 11693, "nswpol</w>": 44434, "nt": 10902, "nt</w>": 3207, "ntr</w>": 30845, "nts</w>": 43775, "ntt</w>": 22859, "ntv": 24807, "ntv</w>": 45304, "nu": 1156, "nu</w>": 9444, "nucle": 25693, "nuclear": 34136, "nuclear</w>": 7279, "nude</w>": 16630, "nudes</w>": 32122, "nue": 22834, "nuestra</w>": 45649, "nuestro</w>": 38590, "nuev": 47861, "nueva</w>": 48810, "nuevo</w>": 30265, "nufc</w>": 15720, "nuff</w>": 37324, "nug": 13471, "nugent</w>": 47457, "nugget</w>": 25448, "nuggets</w>": 18970, "nuh</w>": 45950, "nuit</w>": 38815, "nuk</w>": 39228, "nuke</w>": 39399, "nul": 29358, "null</w>": 47376, "num": 17896, "num</w>": 30534, "numb": 34639, "numb</w>": 39427, "number": 44078, "number</w>": 2842, "numbered</w>": 25975, "numbers</w>": 6121, "numer": 11442, "numerous</w>": 17082, "numis": 39100, 
"nun": 12511, "nun</w>": 28540, "nunavut</w>": 48626, "nunes</w>": 40697, "nuns</w>": 44061, "nup": 46757, "nur": 3920, "nur</w>": 33493, "nure": 42480, "nurse": 37547, "nurse</w>": 10058, "nursery</w>": 15540, "nurses</w>": 12938, "nursing</w>": 11126, "nurture</w>": 38865, "nurturing</w>": 45229, "nus": 25157, "nus</w>": 18239, "nut": 10358, "nut</w>": 6491, "nutcracker</w>": 36733, "nutella</w>": 27312, "nutr": 6198, "nutri": 15470, "nutrient</w>": 32900, "nutrients</w>": 24668, "nutriti": 17978, "nutrition": 41546, "nutrition</w>": 7989, "nutritional</w>": 26457, "nutritious</w>": 30387, "nuts</w>": 8644, "nutshell</w>": 26659, "nutty</w>": 39846, "nv": 17217, "nv</w>": 16985, "nvi": 22847, "nvidia</w>": 27325, "nw": 7826, "nw</w>": 7030, "nwa</w>": 34237, "nwo</w>": 40976, "nws": 23333, "nws</w>": 30998, "nwsl</w>": 48394, "nwt</w>": 25029, "nx": 18810, "nx</w>": 16997, "nxt": 35037, "nxt</w>": 17804, "ny": 1383, "ny</w>": 1350, "nya</w>": 24165, "nyc": 13304, "nyc</w>": 2832, "nycc</w>": 27187, "nycfc</w>": 47497, "nye": 40723, "nye</w>": 13416, "nyfw</w>": 21089, "nyk</w>": 46841, "nylon</w>": 25915, "nyo": 41534, "nyo</w>": 44586, "nypd": 42293, "nypd</w>": 18279, "nyr</w>": 32538, "nyrd</w>": 47936, "nys": 36375, "nys</w>": 23423, "nyse</w>": 32650, "nyt": 46311, "nyt</w>": 12816, "nytimes</w>": 13772, "nyu": 43143, "nyu</w>": 31355, "nz": 10142, "nz</w>": 7082, "o": 78, "o</w>": 334, "oa</w>": 11994, "oahu</w>": 37790, "oak": 6010, "oak</w>": 7221, "oakland": 42663, "oakland</w>": 12077, "oakley</w>": 27810, "oaks</w>": 16734, "oakville</w>": 38500, "oasis</w>": 18185, "oat": 20095, "oat</w>": 34132, "oates</w>": 47094, "oath</w>": 20108, "oatmeal</w>": 26374, "oats</w>": 24150, "oax": 43090, "oaxaca</w>": 47818, "ob": 1411, "ob</w>": 14908, "oba": 42902, "oba</w>": 15147, "obam": 13174, "obama</w>": 4276, "obamacare</w>": 18005, "obe": 11897, "obe</w>": 29117, "obedience</w>": 48921, "ober": 15284, "obese</w>": 41757, "obesity</w>": 19499, "obey</w>": 26926, "obi": 21454, "obi</w>": 18414, "obile</w>": 20513, "obitu": 39218, "obituary</w>": 43580, "objec": 7970, "object</w>": 14115, "objective</w>": 23663, "objectives</w>": 30238, "objects</w>": 13770, "obl": 31452, "oblast</w>": 42672, "obli": 11416, "obligation</w>": 34473, "obligations</w>": 38232, "obligatory</w>": 35020, "oblivion</w>": 45323, "obo": 46001, "obo</w>": 26618, "obrien</w>": 31946, "obs</w>": 39162, "obsc": 20392, "obscure</w>": 33337, "obse": 8433, "observ": 9050, "observation</w>": 20250, "observations</w>": 27409, "observatory</w>": 21236, "observe</w>": 23217, "observed</w>": 21267, "observer</w>": 22077, "observers</w>": 47544, "observing</w>": 28359, "obsessed</w>": 9744, "obsession</w>": 15718, "obsi": 47323, "obsole": 35561, "obsolete</w>": 40628, "obst": 29398, "obstac": 24075, "obstacle</w>": 29751, "obstacles</w>": 24480, "obste": 49103, "obstru": 44876, "obstruc": 38762, "obstruction</w>": 40240, "obtain</w>": 26555, "obtained</w>": 29322, "obvious</w>": 13959, "obviously</w>": 10068, "oc": 1566, "oc</w>": 6603, "oca</w>": 31120, "ocal</w>": 38148, "occ</w>": 43940, "occa": 8530, "occasion</w>": 12280, "occasional</w>": 33059, "occasionally</w>": 32479, "occasions</w>": 26154, "occer</w>": 20804, "occi": 42994, "occu": 7863, "occult</w>": 42529, "occup": 11152, "occupation</w>": 18624, "occupational</w>": 30644, "occupied</w>": 17271, "occupy": 22453, "occupy</w>": 24210, "occur": 11264, "occur</w>": 21813, "occurred</w>": 19850, "occurrence</w>": 40615, "occurring</w>": 31335, "occurs</w>": 26563, 
"ocd</w>": 35904, "oce": 3509, "ocean": 12941, "ocean</w>": 4918, "oceans</w>": 16792, "och": 29334, "och</w>": 32011, "oche</w>": 33045, "oci": 9891, "ocity": 46039, "ock": 33579, "ock</w>": 21313, "ocks</w>": 22410, "oclock</w>": 36274, "oco</w>": 32553, "ocon</w>": 33090, "ocr": 45813, "ocre</w>": 40320, "ocs</w>": 27297, "oct</w>": 4565, "octa": 23444, "octag": 37768, "octagon</w>": 49167, "octane</w>": 43040, "octavia</w>": 47416, "octo": 31032, "october</w>": 3481, "octopus</w>": 22327, "ocu": 22709, "oculus</w>": 30082, "od": 4886, "od</w>": 9719, "oda</w>": 24777, "oday</w>": 41954, "odd": 15525, "odd</w>": 11387, "oddly</w>": 34213, "odds</w>": 11555, "ode": 19125, "ode</w>": 19639, "odell</w>": 41556, "odessa</w>": 43574, "odi": 12223, "odi</w>": 18853, "odin</w>": 35175, "odisha</w>": 15737, "odo": 49188, "odo</w>": 40993, "odor</w>": 39509, "odu": 35095, "odu</w>": 39904, "odyssey</w>": 19991, "oe": 24251, "oe</w>": 11667, "oec": 24288, "oecd</w>": 30816, "oem</w>": 29650, "oes</w>": 3643, "of": 684, "of</w>": 539, "ofa</w>": 29774, "ofc</w>": 19877, "ofe": 30000, "ofer": 47322, "off": 892, "off</w>": 1007, "offe": 8261, "offee</w>": 34059, "offen": 7231, "offence</w>": 34594, "offences</w>": 33972, "offended</w>": 30765, "offender</w>": 48294, "offenders</w>": 35878, "offense</w>": 15253, "offensive</w>": 11037, "offer": 20607, "offer</w>": 3271, "offered</w>": 9395, "offering</w>": 6896, "offerings</w>": 24535, "offers</w>": 4679, "offic": 3276, "office": 18033, "office</w>": 2171, "officeof": 38750, "officeofrg</w>": 47100, "officer</w>": 4683, "officers</w>": 6335, "offices</w>": 10933, "offici": 1401, "official": 5768, "official</w>": 1868, "officially</w>": 4226, "officials</w>": 7658, "officiel</w>": 26548, "offl</w>": 16851, "offline</w>": 22724, "offro": 32198, "offroad</w>": 37173, "offs</w>": 23987, "offseason</w>": 25485, "offset</w>": 28843, "offshore</w>": 15496, "offside</w>": 49347, "offspring</w>": 38635, "offthe": 38189, "ofi": 36692, "ofi</w>": 49090, "oficial</w>": 18061, "oft</w>": 16693, "oftball</w>": 39768, "often</w>": 4864, "ofthe": 7592, "oftheday</w>": 6988, "oftheweek</w>": 20654, "oftheyear</w>": 33975, "og": 11542, "og</w>": 8555, "oga</w>": 47312, "ogden</w>": 42011, "ogil": 39013, "ography</w>": 22399, "ogue</w>": 24761, "ogun": 48970, "oh": 5648, "oh</w>": 1779, "ohana</w>": 48330, "ohh</w>": 23076, "ohhh</w>": 27697, "ohhhh</w>": 40201, "ohi": 5207, "ohio": 18951, "ohio</w>": 6155, "ohiostate</w>": 41324, "ohl": 45547, "ohl</w>": 41095, "ohmy": 29758, "ohn</w>": 48043, "ohs</w>": 39542, "ohwx</w>": 47993, "oi": 27357, "oi</w>": 13934, "oic</w>": 45554, "oid</w>": 14758, "oids</w>": 21847, "oil": 11973, "oil</w>": 2870, "oiland": 32316, "oilandgas</w>": 34130, "oilers</w>": 21627, "oilpainting</w>": 34279, "oils</w>": 17886, "oily</w>": 47550, "oir": 48079, "oir</w>": 37113, "ois</w>": 23262, "oit": 18453, "oitnb</w>": 34865, "oj": 30986, "oj</w>": 34553, "ok": 1944, "ok</w>": 2481, "oka": 42258, "oka</w>": 19092, "okan": 41263, "okanagan</w>": 43233, "okay</w>": 4917, "okc": 42418, "okc</w>": 18357, "oke": 26636, "oke</w>": 23598, "oki</w>": 20390, "okin": 30687, "okinawa</w>": 35877, "okla": 9431, "oklahoma</w>": 10170, "oko": 26892, "oko</w>": 26095, "okstate</w>": 36356, "oktoberfest</w>": 32026, "oku": 45010, "oku</w>": 43829, "okwx</w>": 27336, "ol": 562, "ol</w>": 2985, "ola": 20499, "ola</w>": 3373, "olaf</w>": 39709, "olan": 48489, "olan</w>": 24227, "oland</w>": 26452, "olas</w>": 40800, "old": 4931, "old</w>": 896, "olde</w>": 37731, 
"older</w>": 7700, "oldest</w>": 9285, "oldham</w>": 29929, "oldie</w>": 35280, "oldies</w>": 36278, "oldman</w>": 48614, "olds</w>": 8580, "oldschool": 44384, "oldschool</w>": 25133, "oldsmobile</w>": 45396, "ole": 9089, "ole</w>": 1947, "oled</w>": 46768, "oler": 24069, "oles</w>": 16962, "olf</w>": 16346, "olga</w>": 34779, "oli": 3811, "oli</w>": 8810, "olic</w>": 31341, "oligar": 46185, "olim": 47769, "olin": 37823, "olin</w>": 18283, "olina</w>": 34711, "oline</w>": 17441, "oling</w>": 38033, "olini</w>": 36040, "olis</w>": 49397, "olithic</w>": 35574, "olive": 22486, "olive</w>": 9898, "oliver": 22882, "oliver</w>": 9261, "olives</w>": 27149, "olivi": 20773, "olivia</w>": 11697, "olivier</w>": 23891, "oll": 32270, "oll</w>": 15510, "olla</w>": 31908, "ollie</w>": 24434, "olls</w>": 42697, "olly</w>": 23998, "olo": 14628, "olo</w>": 7606, "ological</w>": 12345, "ologist</w>": 23442, "ologists</w>": 30912, "ology</w>": 4627, "olor</w>": 29245, "olph</w>": 25077, "ols</w>": 2236, "olsen</w>": 26307, "olson</w>": 28046, "olt</w>": 46252, "olu": 16502, "olu</w>": 46302, "olulu</w>": 27645, "oly": 20323, "oly</w>": 24823, "olym": 3594, "olympi": 13597, "olympia</w>": 23965, "olympiad</w>": 47694, "olympian</w>": 25420, "olympians</w>": 44583, "olympic": 26099, "olympic</w>": 6388, "olympics</w>": 7629, "olympus</w>": 30960, "om": 547, "om</w>": 3932, "oma": 44603, "oma</w>": 5358, "omaha</w>": 16509, "oman": 22088, "oman</w>": 10871, "omar": 19488, "omar</w>": 13367, "omars</w>": 37099, "omas</w>": 36023, "omat": 40788, "omb</w>": 34447, "ombe</w>": 35967, "omd</w>": 49346, "ome": 3693, "ome</w>": 5832, "omed</w>": 16835, "omega</w>": 13465, "omelette</w>": 38789, "omen": 9969, "omen</w>": 25469, "oment</w>": 43683, "omeo</w>": 39844, "omer": 24087, "omer</w>": 17902, "omes</w>": 25736, "ometer</w>": 20060, "ometric</w>": 38702, "omez</w>": 12541, "omf</w>": 47496, "omfg</w>": 12523, "omg": 35233, "omg</w>": 3186, "omi": 24097, "omi</w>": 10341, "omic": 40536, "omic</w>": 12793, "omics</w>": 15138, "omile</w>": 46915, "omin": 16457, "omination</w>": 42571, "oming</w>": 10796, "ominous</w>": 40914, "omni": 18793, "omni</w>": 39489, "omnibus</w>": 44760, "omnic": 48383, "omo": 14478, "omo</w>": 11066, "omon": 48758, "omor": 29431, "oms</w>": 3770, "omusic</w>": 38965, "omy": 40805, "omy</w>": 6884, "on": 521, "on</w>": 525, "ona</w>": 2687, "onair</w>": 29511, "onal</w>": 918, "onboard</w>": 21689, "once": 16331, "once</w>": 2654, "onceupon": 28122, "onceuponatime</w>": 33505, "onco": 46700, "oncology</w>": 24593, "ond": 27918, "ond</w>": 2636, "onda</w>": 32643, "onday</w>": 29864, "onde</w>": 44532, "ondo</w>": 29529, "ondon": 42043, "ondon</w>": 11851, "one": 1980, "one</w>": 637, "onec": 27746, "oned": 28012, "oned</w>": 4698, "onedirection</w>": 16245, "onee</w>": 44433, "oneill</w>": 44808, "onelove</w>": 47417, "onent</w>": 12147, "onents</w>": 11709, "oneof": 48478, "onep": 20440, "onepiece</w>": 43153, "oneplus</w>": 25981, "oner": 30055, "oner</w>": 6071, "oners</w>": 12324, "ones": 20757, "ones</w>": 1575, "oneself</w>": 46874, "onesie</w>": 33237, "oness</w>": 25379, "onet</w>": 36058, "oneteam</w>": 41094, "onetsy</w>": 33392, "onew</w>": 43848, "onews</w>": 18696, "onex</w>": 49116, "oney": 44498, "oney</w>": 9408, "onf</w>": 41790, "onfox</w>": 29874, "ong": 2787, "ong</w>": 846, "onga</w>": 30259, "ongchang</w>": 35071, "ongi</w>": 21754, "ongo</w>": 31226, "ongoing</w>": 10393, "ongs</w>": 12143, "oni": 4385, "oni</w>": 8048, "onia</w>": 8001, "onial</w>": 27599, 
"onian</w>": 21090, "onic": 15838, "onic</w>": 3711, "onica</w>": 14631, "onics</w>": 9779, "onie</w>": 35249, "onies</w>": 22601, "onimo</w>": 41271, "oning</w>": 5197, "onion</w>": 10985, "onions</w>": 15255, "onist</w>": 10099, "onists</w>": 19659, "onix</w>": 27370, "onized</w>": 43657, "onlin": 31103, "online": 12940, "online</w>": 2027, "onlinemarketing</w>": 41820, "onlineshopping</w>": 38587, "only": 11646, "only</w>": 1033, "onlyin": 32947, "onna": 25438, "onna</w>": 35458, "onnaise</w>": 48934, "onne</w>": 23466, "onnell</w>": 45613, "ono": 28165, "ono</w>": 14388, "onom": 48014, "onomy</w>": 36873, "onpoli</w>": 20708, "ons": 26076, "ons</w>": 708, "onsale</w>": 36324, "onset</w>": 30527, "onsite</w>": 37336, "onstage</w>": 21821, "onstorm</w>": 49333, "ont": 34303, "ont</w>": 11157, "ontari": 6739, "ontario": 42766, "ontario</w>": 7436, "onte</w>": 34723, "onthe": 12241, "onther": 46563, "ontheroad</w>": 47516, "onthisday</w>": 6862, "onto": 11745, "onto</w>": 3141, "ontology</w>": 37364, "ontour</w>": 32155, "onu": 44142, "onward</w>": 34827, "onwards</w>": 20682, "ony": 9490, "ony</w>": 2926, "onym": 11483, "onymous</w>": 13038, "onyx</w>": 31353, "oo": 574, "oo</w>": 2822, "ood": 16429, "ood</w>": 738, "oodle</w>": 45289, "oods</w>": 44660, "oof</w>": 42270, "ooh</w>": 16806, "ook": 22326, "ook</w>": 8394, "ooks</w>": 31082, "ool": 37702, "ool</w>": 929, "oom": 22786, "oom</w>": 15002, "oomf</w>": 40607, "oon": 35651, "oon</w>": 7100, "ooo</w>": 9571, "oooh</w>": 28927, "oooo": 4002, "oooo</w>": 13643, "ooooo</w>": 12532, "oooooo": 43590, "oooooo</w>": 20372, "ooooooo</w>": 30859, "oooooooo": 15473, "oooooooo</w>": 43408, "oooooooooooooooo": 48645, "oop</w>": 7326, "ooper</w>": 39906, "oops</w>": 9116, "oor</w>": 35239, "oos</w>": 9896, "oosa</w>": 30834, "oose</w>": 38941, "oot</w>": 17667, "ootball</w>": 28914, "ootd</w>": 16547, "ooth</w>": 12682, "oott</w>": 34316, "ooza</w>": 22809, "op": 676, "op</w>": 3691, "opa</w>": 28949, "opal</w>": 28982, "opar": 18167, "opath": 33079, "opathic</w>": 37521, "opathy</w>": 28466, "opau": 27239, "opd</w>": 38288, "ope": 31694, "ope</w>": 11440, "opec</w>": 33138, "opel</w>": 36952, "open": 3647, "open</w>": 1488, "openaccess</w>": 26591, "opend": 28069, "opendata</w>": 35709, "openday</w>": 46991, "opened</w>": 5303, "opener</w>": 8998, "openhouse</w>": 36091, "opening": 33728, "opening</w>": 2516, "openingday</w>": 36359, "openings</w>": 27643, "openly</w>": 23005, "opens</w>": 4801, "opensource</w>": 29930, "oper": 2796, "oper</w>": 37533, "opera</w>": 8056, "operate</w>": 19306, "operated</w>": 23031, "operates</w>": 38675, "operating</w>": 12218, "operation": 27173, "operation</w>": 7639, "operational</w>": 18237, "operations</w>": 8106, "operative</w>": 28380, "operator</w>": 15972, "operators</w>": 19267, "opers</w>": 48728, "opes</w>": 37258, "oph": 6796, "opha": 38634, "ophel": 45017, "ophelia</w>": 49118, "ophi": 44547, "ophile</w>": 35915, "opho": 12900, "ophobia</w>": 21111, "ophobic</w>": 29934, "ophon": 25120, "ophone</w>": 26345, "ophthal": 33135, "ophy": 28539, "opi": 40056, "opi</w>": 48994, "opin": 7636, "opini": 14825, "opinion</w>": 7843, "opinions</w>": 16192, "opio": 17371, "opioid</w>": 22833, "opioids</w>": 47578, "opla": 36270, "ople</w>": 25663, "opol": 15173, "opoly</w>": 23729, "opor": 39650, "opoulos</w>": 42020, "opp": 2020, "opp</w>": 21024, "oppa</w>": 23637, "oppo": 7399, "oppo</w>": 41770, "opponent</w>": 17002, "opponents</w>": 19664, "oppor": 2914, "opportun": 2939, "opportunities</w>": 5978, 
"opportunity</w>": 4004, "oppos": 10091, "oppose</w>": 23617, "opposed</w>": 22509, "opposes</w>": 47471, "opposing</w>": 24376, "opposite</w>": 12872, "opposition</w>": 11062, "oppre": 17341, "oppressed</w>": 41492, "oppression</w>": 30650, "opra</w>": 28291, "oprah</w>": 22562, "opry</w>": 35340, "ops</w>": 3054, "opt": 45103, "opt</w>": 27188, "opted</w>": 42035, "opti": 6580, "optic": 25190, "optic</w>": 24755, "optical</w>": 16822, "optics</w>": 27165, "optim": 22331, "optimal</w>": 25235, "optimi": 9737, "optimis": 39459, "optimism</w>": 25226, "optimist</w>": 44581, "optimistic</w>": 23104, "optimization</w>": 25125, "optimize</w>": 30456, "optimized</w>": 43939, "optimizing</w>": 49157, "optimum</w>": 35974, "optimus</w>": 43453, "option</w>": 8464, "optional</w>": 25411, "options</w>": 7063, "optome": 35533, "opul": 39858, "opus</w>": 33295, "opy</w>": 21835, "or": 523, "or</w>": 541, "ora</w>": 4301, "orac": 24673, "oracle": 37308, "oracle</w>": 15966, "orah</w>": 40820, "orail</w>": 45120, "oral": 32490, "oral</w>": 6007, "orama</w>": 33619, "oran": 32209, "oran</w>": 28395, "orang": 22116, "orange": 13957, "orange</w>": 4287, "oranges</w>": 32417, "orangu": 36112, "orb": 28894, "orb</w>": 36958, "orbit</w>": 19713, "orbital</w>": 40312, "orc</w>": 44305, "orca</w>": 18631, "orcas</w>": 47676, "orch": 11893, "orchar": 40226, "orchard</w>": 19530, "orche": 8004, "orchestr": 42937, "orchestra</w>": 9573, "orchestral</w>": 40285, "orchi": 23696, "orchid</w>": 18678, "orchids</w>": 28376, "ord": 26903, "ord</w>": 11502, "orda": 33462, "ordained</w>": 38302, "order": 24613, "order</w>": 2191, "ordered</w>": 8335, "ordering</w>": 19588, "orderly</w>": 43457, "orders</w>": 6187, "ordin": 4378, "ordinance</w>": 38583, "ordinary</w>": 8012, "ore": 3580, "ore</w>": 1423, "orean</w>": 36696, "ored</w>": 5133, "oregon": 21759, "oregon</w>": 8035, "oren": 21645, "oreo</w>": 21873, "oreos</w>": 41688, "ores</w>": 17328, "org": 3401, "org</w>": 5593, "organ": 3338, "organ</w>": 13213, "organi": 3636, "organic": 24080, "organic</w>": 5980, "organics</w>": 44199, "organis": 13204, "organisation</w>": 15868, "organisations</w>": 20651, "organise</w>": 36073, "organised</w>": 13191, "organiser</w>": 49141, "organisers</w>": 35778, "organising</w>": 22787, "organisms</w>": 37041, "organiz": 11107, "organization</w>": 8064, "organizational</w>": 29510, "organizations</w>": 13453, "organize</w>": 19973, "organized</w>": 10681, "organizer</w>": 23905, "organizers</w>": 27191, "organizing</w>": 15779, "organs</w>": 29872, "orgs</w>": 29500, "ori": 1540, "ori</w>": 8693, "oria</w>": 11474, "orial</w>": 8648, "orian</w>": 21193, "oric</w>": 43810, "orice</w>": 41341, "orie</w>": 18815, "orient": 13149, "orient</w>": 30770, "oriental</w>": 23056, "orientation</w>": 16873, "oriente": 40390, "oriented</w>": 24596, "orienteering</w>": 42985, "ories</w>": 5934, "orig": 2273, "orig</w>": 38463, "origami</w>": 31832, "origin": 2555, "origin</w>": 12372, "original": 18496, "original</w>": 3117, "originally</w>": 12849, "originals</w>": 16953, "originated</w>": 41823, "origins</w>": 16291, "orin</w>": 39863, "oring</w>": 3006, "orio</w>": 24308, "orioles</w>": 21430, "orion</w>": 21765, "oris</w>": 37064, "orities</w>": 7903, "ority</w>": 5556, "orium</w>": 12015, "ork": 22202, "ork</w>": 37235, "orkney</w>": 34254, "orl</w>": 39465, "orlando": 32247, "orlando</w>": 7827, "orleans</w>": 11127, "orm</w>": 38464, "orn": 25412, "orn</w>": 8130, "ornam": 36122, "ornament</w>": 23409, "ornamental</w>": 46270, 
"ornaments</w>": 28968, "ornate</w>": 46865, "orni": 27713, "ornithology</w>": 38275, "orns</w>": 19340, "oro": 9848, "oro</w>": 14573, "orous</w>": 19286, "orph": 17318, "orphan": 22718, "orphan</w>": 28994, "orphanage</w>": 45196, "orphaned</w>": 46792, "orphans</w>": 36588, "orphe": 39186, "orr</w>": 32977, "ors</w>": 1127, "orship</w>": 20846, "ort</w>": 1019, "ortega</w>": 39727, "orth": 22584, "orth</w>": 24461, "ortho": 11366, "orthodon": 37730, "orthodox</w>": 19008, "orthop": 42123, "orthopedic</w>": 49341, "ortiz</w>": 23544, "orton</w>": 37238, "oru": 44629, "oru</w>": 31281, "orum</w>": 42724, "orwell</w>": 41218, "ory": 16983, "ory</w>": 1985, "os": 2211, "os</w>": 1299, "osa": 16340, "osa</w>": 17237, "osaka</w>": 21347, "osborne</w>": 22402, "osbourne</w>": 43376, "osc": 5092, "oscar": 21157, "oscar</w>": 8191, "oscars</w>": 11098, "osce</w>": 37303, "oscill": 38272, "ose": 46942, "ose</w>": 22541, "osh": 30717, "osh</w>": 35011, "osha</w>": 33907, "oshi</w>": 34770, "osi": 25247, "osi</w>": 17636, "osis</w>": 13903, "osity</w>": 12730, "oslo</w>": 20547, "osm": 31626, "osman</w>": 46539, "oso": 42793, "oso</w>": 21285, "osp": 24387, "ospre": 49001, "osprey</w>": 37893, "oss": 29362, "oss</w>": 34640, "ost": 23701, "ost</w>": 18749, "oste": 20632, "osteo": 43163, "oster": 31781, "ostr": 33673, "ostrich</w>": 47640, "osu": 29480, "osu</w>": 19818, "oswald</w>": 38471, "ot": 1863, "ot</w>": 2062, "ota": 17509, "ota</w>": 8741, "otago</w>": 45919, "otaku</w>": 40743, "otas</w>": 47616, "otc</w>": 37934, "otd</w>": 5683, "ote": 28511, "ote</w>": 19744, "otes</w>": 27280, "oth": 33262, "oth</w>": 33519, "other": 9758, "other</w>": 1010, "others</w>": 3326, "otherwise</w>": 12376, "oti": 19567, "oti</w>": 45564, "otic</w>": 9671, "otis</w>": 28246, "otive</w>": 10877, "oto": 23946, "oto</w>": 23399, "otp</w>": 29822, "otr</w>": 38685, "ots</w>": 5769, "ott": 10167, "ott</w>": 7936, "otta": 7623, "otta</w>": 20941, "ottawa": 49027, "ottawa</w>": 9019, "otte</w>": 35214, "otter": 34710, "otter</w>": 22456, "otters</w>": 38883, "otti</w>": 36721, "ottnews</w>": 33995, "otto</w>": 17730, "ottoman</w>": 27503, "otw</w>": 35259, "otwol": 46868, "ou": 520, "ou</w>": 6544, "ouat</w>": 32954, "ouch</w>": 13493, "oud</w>": 1359, "oue</w>": 48838, "ouf</w>": 34618, "ough": 4204, "ough</w>": 991, "ought</w>": 2253, "oughton</w>": 36860, "oui</w>": 39421, "ouk</w>": 21796, "oul": 20253, "oul</w>": 8081, "ould</w>": 859, "oulos</w>": 32808, "oun": 636, "oun</w>": 20960, "ounce</w>": 15027, "ounces</w>": 30299, "ound": 2013, "ound</w>": 853, "oundation</w>": 40132, "ounded</w>": 9634, "ounding</w>": 11944, "ounds</w>": 2753, "oung": 35875, "oung</w>": 25341, "ounge</w>": 29427, "ount": 43801, "ount</w>": 4172, "ounts</w>": 10963, "oup</w>": 32815, "our": 727, "our</w>": 581, "oura": 29806, "oura</w>": 36352, "ourable</w>": 24126, "ourage</w>": 34525, "oural</w>": 45840, "oured</w>": 6956, "ouri": 12696, "ouring</w>": 12000, "ourism</w>": 25496, "ourke</w>": 26480, "ourlives</w>": 37541, "ouro": 41224, "ours</w>": 1491, "ourse</w>": 15415, "ourselves</w>": 10124, "ourt</w>": 22960, "oury</w>": 29484, "ous": 1987, "ous</w>": 879, "ouse": 32048, "ouse</w>": 7603, "ouses</w>": 33666, "ously</w>": 2501, "ousness</w>": 10689, "ousy</w>": 28302, "out": 1130, "out</w>": 620, "outa</w>": 35187, "outage</w>": 27320, "outages</w>": 40353, "outback</w>": 28532, "outbound</w>": 41256, "outbreak</w>": 20103, "outcome</w>": 16552, "outcomes</w>": 14016, "outdated</w>": 38313, "outdoor": 19184, "outdoor</w>": 
6368, "outdoors</w>": 10469, "oute</w>": 44180, "outed</w>": 34435, "outer": 30499, "outer</w>": 14188, "outes</w>": 39600, "outfield</w>": 41826, "outfit</w>": 6525, "outfits</w>": 16366, "outfitters</w>": 37725, "outfy</w>": 34920, "outgoing</w>": 27302, "outh": 16933, "outh</w>": 8111, "outine</w>": 35452, "outing</w>": 11251, "outlander": 45820, "outlander</w>": 17095, "outlaw": 37498, "outlaw</w>": 27340, "outlaws</w>": 30935, "outlet</w>": 16855, "outlets</w>": 20822, "outline</w>": 26894, "outlines</w>": 29159, "outlining</w>": 45960, "outlook</w>": 12983, "outof": 43958, "outpatient</w>": 46603, "outpost</w>": 44622, "output</w>": 17255, "outra": 14262, "outrage</w>": 23577, "outraged</w>": 43402, "outrageous</w>": 29342, "outre": 14373, "outreach</w>": 15297, "outright</w>": 38200, "outs</w>": 5790, "outsi": 22515, "outside": 47693, "outside</w>": 2782, "outsider</w>": 41196, "outsiders</w>": 41742, "outskirts</w>": 42088, "outsourcing</w>": 34543, "outstanding</w>": 6387, "outta</w>": 15807, "outtuesday</w>": 48692, "outw": 34650, "oux": 40960, "oux</w>": 14228, "ov": 6420, "ov</w>": 8479, "ova</w>": 12762, "oval</w>": 15039, "ovarian</w>": 42913, "ovation</w>": 24333, "ove": 8649, "ove</w>": 15456, "oven": 44620, "oven</w>": 12579, "over": 1658, "over</w>": 962, "overall</w>": 6914, "overboard</w>": 42982, "overcame</w>": 47235, "overcast</w>": 36942, "overcome</w>": 14365, "overcoming</w>": 29348, "overdose</w>": 27017, "overdrive</w>": 40088, "overdue</w>": 30240, "overflow</w>": 32885, "overflowing</w>": 45370, "overhaul</w>": 31531, "overhead</w>": 20321, "overland</w>": 38808, "overlay</w>": 44827, "overload</w>": 24327, "overlook</w>": 35767, "overlooked</w>": 27632, "overlooking</w>": 17319, "overly</w>": 28820, "overnight</w>": 9913, "overpass</w>": 44310, "overrated</w>": 38214, "overs": 45774, "overs</w>": 17329, "overseas</w>": 15100, "oversight</w>": 32494, "oversized</w>": 31557, "overtime</w>": 19347, "overturned</w>": 31048, "overview</w>": 14789, "overwatch</w>": 18124, "overweight</w>": 43465, "overwhel": 12204, "overwhelmed</w>": 23459, "overwhelming</w>": 20306, "overwhelmingly</w>": 43549, "ovi": 32508, "ovic</w>": 22417, "ovich</w>": 27623, "ovie": 47677, "ovo": 41920, "ovo</w>": 18065, "ovski</w>": 26167, "ow": 2032, "ow</w>": 2250, "owa</w>": 32770, "owe</w>": 19073, "owed</w>": 37641, "owen": 24838, "owen</w>": 12056, "owens</w>": 20664, "owes</w>": 35069, "owing</w>": 48582, "owl": 34332, "owl</w>": 9899, "owls</w>": 18247, "own": 3845, "own</w>": 1758, "owned</w>": 8536, "owner</w>": 5019, "owners</w>": 7712, "ownership</w>": 16583, "owning</w>": 24661, "owns</w>": 17533, "owo</w>": 46142, "ows</w>": 27423, "owski</w>": 22573, "ox": 3282, "ox</w>": 12071, "oxfam</w>": 45466, "oxford": 28588, "oxford</w>": 8824, "oxfordshire</w>": 37855, "oxi": 33731, "oxi</w>": 48147, "oxid": 17701, "oxide</w>": 28235, "oxo</w>": 37088, "oxy": 12432, "oxygen</w>": 16214, "oy": 6638, "oy</w>": 12437, "oya</w>": 38894, "oye": 48677, "oyster": 40545, "oyster</w>": 17253, "oysters</w>": 22672, "oz": 10584, "oz</w>": 6044, "ozar": 31848, "ozil</w>": 41365, "ozone</w>": 37052, "ozzy</w>": 39549, "p": 79, "p</w>": 335, "pa": 765, "pa</w>": 2217, "paa</w>": 32812, "pab": 9354, "pablo": 42172, "pablo</w>": 14473, "pac": 2332, "pac</w>": 7608, "pace": 40600, "pace</w>": 9450, "paced</w>": 32611, "pacers</w>": 23976, "paces</w>": 43001, "paci": 5699, "pacific": 19723, "pacific</w>": 6654, "pacing</w>": 45202, "pack": 2711, "pack</w>": 3420, "package</w>": 7053, "packaged</w>": 
29656, "packages</w>": 14305, "packaging</w>": 11658, "packard</w>": 46421, "packed</w>": 5883, "packer</w>": 28209, "packers</w>": 14294, "packet</w>": 25022, "packets</w>": 40448, "packing</w>": 9829, "packs</w>": 11086, "paco</w>": 41364, "pacqui": 28456, "pacquiao</w>": 30485, "pact</w>": 27182, "pad": 3798, "pad</w>": 7601, "padded</w>": 42253, "paddington</w>": 33162, "paddle": 38276, "paddle</w>": 20811, "paddling</w>": 40645, "paddock</w>": 29590, "paddy": 33103, "paddy</w>": 19855, "padi</w>": 47037, "padilla</w>": 22380, "padma": 44595, "padma</w>": 46457, "padre</w>": 38343, "padres</w>": 22829, "pads</w>": 17353, "paedi": 41488, "paella</w>": 46924, "paf": 47185, "pafc</w>": 49259, "pag": 4151, "pag</w>": 30525, "pagan</w>": 27854, "page": 14996, "page</w>": 2504, "pageant</w>": 22139, "pages</w>": 8082, "pagoda</w>": 44309, "pah": 41054, "pah</w>": 26884, "pai": 20624, "pai</w>": 21198, "paid</w>": 5057, "paige": 33659, "paige</w>": 16022, "paign</w>": 31796, "pain": 2141, "pain</w>": 4495, "paine</w>": 38069, "painful</w>": 16361, "pains</w>": 25639, "paint": 7948, "paint</w>": 5185, "paintball</w>": 39730, "painted</w>": 6433, "painter</w>": 10888, "painters</w>": 35703, "painting": 49164, "painting</w>": 3086, "paintings</w>": 9956, "paints</w>": 21672, "pair": 19848, "pair</w>": 4038, "paired</w>": 12433, "pairing</w>": 16313, "pairings</w>": 41152, "pairs</w>": 9950, "pais": 16878, "paisley</w>": 22954, "pajam": 24110, "pajama</w>": 40244, "pajamas</w>": 37231, "pak": 13186, "pak</w>": 9094, "paki": 3438, "pakistan": 10713, "pakistan</w>": 3994, "pakistani</w>": 14050, "pakistanis</w>": 45707, "pakv": 38196, "pal": 1850, "pal</w>": 3611, "pala</w>": 17895, "palace</w>": 6381, "palaces</w>": 45625, "palad": 28371, "palae": 43379, "palais</w>": 35673, "palate</w>": 34666, "palawan</w>": 48202, "palazzo</w>": 36006, "pale": 4768, "pale</w>": 12518, "paleo": 36741, "paleo</w>": 22198, "paler": 38028, "palermo</w>": 40635, "palestin": 9449, "palestine</w>": 11682, "palestinian</w>": 11764, "palestinians</w>": 21874, "palette</w>": 13901, "pali": 48063, "palin</w>": 40153, "palis": 44256, "pality</w>": 27296, "pall": 35817, "palla": 21208, "palladium</w>": 37888, "pallet</w>": 39057, "palli": 28954, "palliative</w>": 46014, "pally</w>": 46073, "palm": 19651, "palm</w>": 8612, "palma</w>": 29888, "palmer": 40112, "palmer</w>": 13633, "palms</w>": 27059, "palo</w>": 31562, "palom": 47698, "palooza</w>": 25861, "pals</w>": 11043, "palsy</w>": 46651, "pam": 8228, "pam</w>": 18513, "pamela</w>": 26991, "pamp": 37653, "pamper</w>": 44345, "pamph": 41332, "pan": 1072, "pan</w>": 7437, "panam": 24606, "panama</w>": 15522, "panas": 26207, "panasonic</w>": 29750, "pancake</w>": 18723, "pancakes</w>": 15308, "panch": 27251, "pancra": 42472, "pancre": 27708, "pancreatic</w>": 49337, "pancy</w>": 41625, "pand": 5631, "panda</w>": 12952, "pandas</w>": 35119, "pande": 38419, "pandey</w>": 34895, "pandit</w>": 41191, "pandor": 30250, "pandora</w>": 17727, "pandoramusic</w>": 42344, "pane</w>": 27470, "panel</w>": 3724, "paneli": 19410, "panelist</w>": 39719, "panelists</w>": 24619, "panels</w>": 12735, "panera</w>": 48471, "pang": 16756, "pang</w>": 23672, "panhandle</w>": 40919, "pani</w>": 36092, "panic": 46671, "panic</w>": 14124, "panini</w>": 30410, "pann": 42302, "panna</w>": 49065, "pano</w>": 36165, "panor": 12962, "panorama</w>": 19763, "panoramic</w>": 22563, "pans</w>": 35204, "pant</w>": 22550, "panther": 22825, "panther</w>": 13262, "panthers</w>": 10494, "panties</w>": 32515, 
"panto</w>": 28776, "pantry</w>": 25608, "pants</w>": 5003, "panty": 44217, "pany</w>": 45567, "panzer</w>": 41159, "pao</w>": 33790, "paola</w>": 44689, "paolo": 48488, "paolo</w>": 21133, "pap": 1884, "pap</w>": 30756, "papa</w>": 12211, "papar": 32782, "paparazzi</w>": 37842, "papaya</w>": 44098, "paper": 8680, "paper</w>": 2802, "paperback</w>": 17928, "papers</w>": 8204, "paperwork</w>": 35785, "papi</w>": 35177, "papp": 26361, "paprika</w>": 44793, "papua</w>": 32629, "par": 699, "par</w>": 9163, "para": 18355, "para</w>": 8976, "parach": 23147, "parachute</w>": 30122, "parad": 37143, "parade</w>": 5809, "parades</w>": 46479, "paradi": 6658, "paradig": 27786, "paradigm</w>": 33485, "paradise": 45869, "paradise</w>": 7247, "paradox</w>": 33109, "parag": 11866, "paragon</w>": 48099, "paragra": 24903, "paragraph</w>": 28499, "paragu": 38021, "paraguay</w>": 43579, "paral": 15143, "paralle": 13184, "parallel</w>": 18201, "paralleled</w>": 42520, "parallels</w>": 46101, "paraly": 30255, "paralym": 18727, "paralympic</w>": 30806, "paralympics</w>": 37162, "paralysis</w>": 45702, "param": 12250, "parame": 27106, "paramedic</w>": 34630, "paramedics</w>": 35991, "parameters</w>": 44890, "paramore</w>": 34401, "paramount</w>": 26642, "parano": 30283, "paranoid</w>": 43029, "paranor": 16940, "paranormal</w>": 19047, "parap": 41091, "paras": 15198, "parasite</w>": 42460, "parasites</w>": 46175, "parc</w>": 30914, "parcel</w>": 30367, "parcels</w>": 45589, "pard</w>": 18773, "pardon": 47606, "pardon</w>": 26565, "pare": 18202, "pared</w>": 5498, "paren": 3106, "parent": 47848, "parent</w>": 10183, "parental</w>": 28339, "parenthood</w>": 23887, "parenting</w>": 14529, "parents</w>": 3731, "pares</w>": 12420, "parfait</w>": 46140, "pari": 17961, "pari</w>": 27979, "paris": 13982, "paris</w>": 3445, "parisagreement</w>": 47405, "parish": 47328, "parish</w>": 13020, "parisi": 45081, "parisian</w>": 38512, "parity</w>": 42734, "park": 4985, "park</w>": 1452, "parked</w>": 16487, "parker": 31119, "parker</w>": 8365, "parkin": 34868, "parking</w>": 5984, "parkinson</w>": 28129, "parkland</w>": 31287, "parkrun</w>": 25747, "parks</w>": 6873, "parkway</w>": 19882, "parl": 30373, "parl</w>": 29897, "parliam": 5941, "parliament": 41599, "parliament</w>": 7151, "parliamentary</w>": 17912, "parlor</w>": 38253, "parlour</w>": 37829, "parma</w>": 36077, "parme": 26295, "parmesan</w>": 27274, "paro": 17429, "parody</w>": 24318, "parole</w>": 32158, "parr</w>": 44113, "parrish</w>": 43043, "parrot</w>": 23565, "parry</w>": 40604, "parsley</w>": 30077, "parsons</w>": 22505, "part": 1872, "part</w>": 1551, "parte</w>": 48508, "parth</w>": 34790, "parti": 10509, "partial</w>": 18957, "partially</w>": 21269, "partic": 2871, "partici": 9540, "particip": 4400, "participant</w>": 27674, "participants</w>": 10237, "participate</w>": 9433, "participated</w>": 14252, "participates</w>": 46414, "participating</w>": 11535, "participation</w>": 13529, "particle</w>": 27716, "particles</w>": 27012, "particul": 11408, "particular</w>": 14098, "particularly</w>": 12170, "parties</w>": 9032, "parting</w>": 32844, "partisan</w>": 20772, "partist</w>": 44713, "partition</w>": 42219, "partly</w>": 21459, "partner": 5210, "partner</w>": 4568, "partnered</w>": 21402, "partnering</w>": 21182, "partners</w>": 5568, "partnership</w>": 6123, "partnerships</w>": 17418, "parton</w>": 43245, "partridge</w>": 34872, "parts</w>": 5149, "party": 12877, "party</w>": 1580, "partying</w>": 25702, "pas": 1341, "pas</w>": 9525, "pasadena</w>": 
25892, "pascal</w>": 28626, "pasco</w>": 49220, "pascu": 42692, "pash": 23936, "pasha</w>": 46986, "paso</w>": 18542, "pasqu": 44941, "pass": 5016, "pass</w>": 3511, "passage</w>": 16477, "passages</w>": 48937, "passed</w>": 4957, "passenger</w>": 12311, "passengers</w>": 12781, "passer</w>": 48544, "passes</w>": 7633, "passi": 32471, "passing</w>": 6589, "passion": 8822, "passion</w>": 5332, "passionate</w>": 10947, "passionately</w>": 44028, "passions</w>": 38441, "passive</w>": 23171, "passover</w>": 38426, "passport</w>": 14739, "passports</w>": 46368, "password</w>": 20258, "passwords</w>": 43095, "past": 7315, "past</w>": 2729, "pasta</w>": 10441, "paste": 34765, "paste</w>": 17038, "pastel</w>": 19457, "pastels</w>": 45699, "pastor": 19792, "pastor</w>": 9664, "pastoral</w>": 37191, "pastors</w>": 30959, "pastr": 45478, "pastries</w>": 39409, "pastry</w>": 18582, "pasture</w>": 34764, "pastures</w>": 47793, "pat": 1300, "pat</w>": 7036, "patag": 29862, "patagonia</w>": 32786, "patch": 29284, "patch</w>": 8721, "patches</w>": 22104, "patchwork</w>": 44675, "patchy</w>": 47488, "pate": 42122, "pate</w>": 42098, "patel</w>": 14168, "patent</w>": 14692, "patented</w>": 37277, "patents</w>": 33911, "paterson</w>": 36560, "path": 7408, "path</w>": 5035, "pathetic</w>": 18222, "pathfinder</w>": 35415, "pathi": 34976, "pathi</w>": 27347, "pathic</w>": 49025, "patho": 18534, "pathology</w>": 23290, "paths</w>": 16333, "pathway</w>": 23488, "pathways</w>": 24690, "pathy</w>": 13330, "pati": 2799, "pati</w>": 26708, "patience</w>": 13575, "patient": 30139, "patient</w>": 6262, "patiently</w>": 22980, "patients</w>": 5543, "patil</w>": 49187, "patio</w>": 14304, "pational</w>": 30627, "patna</w>": 45025, "patory</w>": 41859, "patreon</w>": 17165, "patri": 4771, "patriarch</w>": 49054, "patriarchy</w>": 48806, "patric": 12569, "patrice</w>": 40731, "patricia</w>": 18143, "patrick": 12078, "patrick</w>": 5286, "patricks</w>": 46783, "patriot": 28896, "patriot</w>": 15692, "patrioti": 35520, "patriotic</w>": 20217, "patriotism</w>": 35807, "patriots</w>": 8707, "patro": 31650, "patrol</w>": 10073, "patrolling</w>": 39344, "patrols</w>": 35978, "patron": 26658, "patron</w>": 17683, "patrons</w>": 28308, "pats</w>": 24874, "patsy</w>": 46093, "patt": 12637, "patter": 4982, "pattern</w>": 7447, "patterned</w>": 47212, "patterns</w>": 11637, "patterson</w>": 21384, "patti": 44927, "patti</w>": 26123, "pattinson</w>": 32474, "patton</w>": 29026, "patty": 48741, "patty</w>": 18321, "pau": 1834, "pau</w>": 35970, "paul": 6035, "paul</w>": 2597, "paula": 37363, "paula</w>": 16777, "pauline</w>": 30438, "paulo": 48002, "paulo</w>": 21628, "pauls": 41413, "pauls</w>": 40010, "paulson</w>": 48201, "pause</w>": 19439, "paused</w>": 46782, "pav": 6661, "pave</w>": 37107, "paved</w>": 27898, "pavel</w>": 43152, "pavement</w>": 27669, "pavilion</w>": 13374, "paving</w>": 28651, "paw": 14009, "paw</w>": 16016, "pawan": 29754, "pawankalyan</w>": 33702, "pawn</w>": 43195, "paws</w>": 16714, "pax": 20007, "pax</w>": 19033, "paxton</w>": 38347, "pay": 2642, "pay</w>": 3345, "payback</w>": 36413, "paycheck</w>": 45078, "payday</w>": 26957, "payee</w>": 46985, "payer</w>": 41503, "paying</w>": 8341, "payment</w>": 10596, "payments</w>": 11832, "payne</w>": 12775, "paypal</w>": 21442, "payroll</w>": 31610, "pays</w>": 10845, "paysoff</w>": 48174, "paytm</w>": 45352, "payton</w>": 27348, "paz</w>": 22267, "pb": 20112, "pb</w>": 10981, "pba</w>": 28205, "pbb": 48567, "pbb</w>": 40589, "pbc</w>": 49191, "pbl</w>": 35166, 
"pbr</w>": 32998, "pbs</w>": 17908, "pc": 6782, "pc</w>": 3808, "pca</w>": 35705, "pcb</w>": 26235, "pcc</w>": 36059, "pci</w>": 38957, "pcm</w>": 47436, "pcr</w>": 35704, "pcs</w>": 11917, "pcso</w>": 31963, "pct</w>": 22168, "pd": 4387, "pd</w>": 4675, "pdates</w>": 16842, "pdc</w>": 40498, "pdf</w>": 15181, "pdp</w>": 24601, "pdt</w>": 21743, "pdx": 25470, "pdx</w>": 16153, "pe": 661, "pe</w>": 956, "pea</w>": 13915, "peabo": 34083, "peabody</w>": 41244, "peac": 34615, "peace": 6249, "peace</w>": 3021, "peaceful</w>": 9461, "peacefully</w>": 30530, "peacekeeping</w>": 43630, "peach": 10522, "peach</w>": 11538, "peaches</w>": 27216, "peak": 18572, "peak</w>": 6026, "peakdistrict</w>": 41289, "peake</w>": 24810, "peaked</w>": 36391, "peaks</w>": 14067, "pean": 11563, "peanu": 25843, "peanut</w>": 12491, "peanuts</w>": 26503, "pear": 4910, "pear</w>": 18820, "pearce</w>": 25996, "pearl": 21806, "pearl</w>": 8560, "pearljam</w>": 46739, "pearls</w>": 19581, "pears</w>": 39565, "pearson</w>": 20461, "peas</w>": 15937, "peasant</w>": 40621, "peasants</w>": 48788, "peat</w>": 26914, "pebble</w>": 28056, "pebbles</w>": 40155, "pec": 32447, "pec</w>": 17611, "pecan</w>": 32177, "peck": 25186, "peck</w>": 29234, "pecker</w>": 30169, "peckham</w>": 45863, "pecu": 34200, "peculiar</w>": 42808, "ped": 13197, "ped</w>": 2966, "pedago": 34590, "pedagogy</w>": 48072, "pedal": 32943, "pedal</w>": 19621, "pedals</w>": 38535, "pede": 12862, "pede</w>": 19560, "pedestri": 30027, "pedestrian</w>": 18256, "pedestrians</w>": 33895, "pedi": 12967, "pedia</w>": 11733, "pediatric": 48431, "pediatric</w>": 22071, "pedic": 35319, "pedic</w>": 44528, "pedro": 29963, "pedro</w>": 15114, "peds</w>": 45377, "pee": 12988, "pee</w>": 11196, "peed</w>": 47369, "peek": 46323, "peek</w>": 7569, "peeking</w>": 48771, "peel": 34386, "peel</w>": 17158, "peeled</w>": 33533, "peeling</w>": 48649, "peep": 25425, "peep</w>": 16857, "peeps</w>": 11681, "peer": 32416, "peer</w>": 14432, "peers</w>": 21626, "pees</w>": 31830, "peg": 32182, "peg</w>": 11207, "pegas": 30018, "pegasus</w>": 37822, "peggy</w>": 24271, "pei": 48166, "pei</w>": 12917, "pel": 4286, "pel</w>": 7006, "pele</w>": 44105, "pelican</w>": 34131, "pelicans</w>": 29363, "pell</w>": 46981, "pelle": 31267, "pelled</w>": 32506, "pellegr": 38529, "pellets</w>": 48240, "pelo": 40192, "pelo</w>": 40238, "pelosi</w>": 22169, "pelvic</w>": 45646, "pemb": 19880, "pembro": 24084, "pembroke": 36702, "pembroke</w>": 40044, "pembrokeshire</w>": 40695, "pen": 1501, "pen</w>": 5356, "pena</w>": 35788, "penalties</w>": 25417, "penalty</w>": 11491, "penang</w>": 29545, "penc": 20065, "pence</w>": 18002, "pencil": 41303, "pencil</w>": 11200, "pencils</w>": 21909, "pend": 3052, "pendant</w>": 12415, "pendants</w>": 44117, "pending</w>": 12770, "pendleton</w>": 44272, "pendu": 45336, "penelope</w>": 36703, "penetr": 26058, "peng": 42955, "peng</w>": 39200, "pengu": 8854, "penguin": 28249, "penguin</w>": 14952, "penguins</w>": 16557, "peninsu": 13464, "peninsula</w>": 14070, "penn": 7760, "penn</w>": 11128, "pennant</w>": 43971, "penned</w>": 45077, "penney</w>": 47856, "pennies</w>": 43094, "pennsylvania</w>": 13673, "penny": 20400, "penny</w>": 11388, "pens": 13307, "pens</w>": 13310, "pensac": 30925, "pensacola</w>": 33573, "pension": 32840, "pension</w>": 17764, "pensions</w>": 29773, "penske</w>": 47154, "pent": 10699, "pent</w>": 22725, "pentagon</w>": 23133, "pente": 33165, "penthouse</w>": 32673, "penultimate</w>": 36553, "peop": 1030, "people": 10573, "people</w>": 1047, 
"peoples": 28241, "peoples</w>": 14627, "peopleschoice</w>": 32418, "peoplesvote</w>": 45830, "peoria</w>": 36985, "pep": 12761, "pep</w>": 14898, "pepe</w>": 24778, "pepp": 34425, "pepper": 14861, "pepper</w>": 8253, "peppermint</w>": 30321, "pepperoni</w>": 47307, "peppers</w>": 14650, "pepsi</w>": 21307, "per": 703, "per</w>": 1284, "pera</w>": 26294, "perce": 24135, "perceived</w>": 38436, "percent": 16328, "percent</w>": 9017, "percentage</w>": 19477, "percep": 28017, "perception</w>": 20591, "perceptions</w>": 38138, "perch</w>": 34281, "perched</w>": 40071, "percu": 41722, "percussion</w>": 23980, "percy</w>": 23940, "pere": 8665, "pere</w>": 36300, "pered</w>": 24509, "peregr": 37479, "peregrine</w>": 44546, "pereira</w>": 43927, "peren": 24564, "perenni": 26996, "perennial</w>": 34038, "perez</w>": 15107, "perf</w>": 22816, "perfe": 1624, "perfec": 6599, "perfect": 17261, "perfect</w>": 1878, "perfection</w>": 9646, "perfectly</w>": 8037, "perfecto</w>": 42898, "perfor": 2311, "perform": 3866, "perform</w>": 5940, "performan": 8973, "performance</w>": 2714, "performances</w>": 9553, "performed</w>": 9997, "performer</w>": 17061, "performers</w>": 18476, "performing</w>": 5170, "performs</w>": 13839, "perfu": 14214, "perfume</w>": 17525, "perhaps</w>": 9297, "peri": 12618, "peri</w>": 44068, "perience</w>": 19302, "peril": 40119, "peril</w>": 48301, "perimeter</w>": 38499, "pering</w>": 29746, "perio": 5101, "period</w>": 6131, "periodic</w>": 36476, "periods</w>": 24401, "periph": 35308, "peripheral</w>": 43901, "peris": 19461, "periscope</w>": 21668, "perk</w>": 33424, "perkins</w>": 20057, "perks</w>": 17660, "perl": 44018, "perm</w>": 47847, "perman": 9018, "permanent</w>": 11144, "permanently</w>": 25584, "perme": 42456, "permission</w>": 15822, "permit</w>": 21950, "permits</w>": 33267, "permitted</w>": 44380, "pero</w>": 23551, "perpe": 15749, "perpetr": 33376, "perpetu": 30132, "perpetual</w>": 32018, "perrie</w>": 32691, "perry": 28478, "perry</w>": 7899, "pers": 3688, "pers</w>": 10710, "perse": 27498, "persecu": 22878, "persecution</w>": 32009, "perseverance</w>": 29820, "persi": 11509, "persian</w>": 19859, "persist": 19412, "persist</w>": 40938, "persistence</w>": 34588, "persistent</w>": 29028, "person": 3510, "person</w>": 2533, "persona</w>": 18401, "personal": 10114, "personal</w>": 4121, "personalised</w>": 24186, "personalities</w>": 27888, "personality</w>": 10386, "personalized</w>": 17845, "personally</w>": 13885, "personnel</w>": 14546, "persons</w>": 14592, "perspec": 17997, "perspective</w>": 8996, "perspectives</w>": 18777, "persu": 20972, "pert": 36970, "pert</w>": 16306, "perth": 19067, "perth</w>": 11011, "peru": 20612, "peru</w>": 12964, "peruvian</w>": 30822, "pes": 38368, "pes</w>": 2598, "pesa</w>": 47409, "pesc": 44044, "pesh": 33184, "peshaw": 28524, "peshawar</w>": 29230, "pesky</w>": 42512, "pesos</w>": 47872, "pessi": 43902, "pest": 20130, "pest</w>": 9425, "pesticide</w>": 48481, "pesticides</w>": 37868, "pesto</w>": 26186, "pests</w>": 41919, "pet": 2167, "pet</w>": 3703, "peta</w>": 28785, "petal": 38430, "petal</w>": 40469, "petals</w>": 26064, "petday</w>": 45314, "pete": 14479, "pete</w>": 8571, "peter": 5093, "peter</w>": 3696, "peterborough</w>": 26012, "peters</w>": 16336, "petersburg</w>": 21052, "petersen</w>": 39794, "peterson</w>": 16877, "peth</w>": 48920, "petit": 36437, "petit</w>": 21276, "petite</w>": 27213, "petition</w>": 10975, "petitions</w>": 43536, "petr": 29808, "petra</w>": 31300, "petre": 47179, "petri": 31831, 
"petro": 8716, "petrol</w>": 18149, "petroleum</w>": 22063, "petron": 42875, "pets</w>": 7663, "pett": 27051, "petti": 48001, "petting</w>": 44334, "petty</w>": 17324, "peu": 21411, "peuge": 22893, "peugeot</w>": 24129, "pew": 21608, "pew</w>": 30783, "pewdie": 41882, "pewdiepie</w>": 42563, "pex</w>": 43765, "pey": 14966, "pey</w>": 30933, "peyton": 49254, "peyton</w>": 20307, "pez": 45798, "pez</w>": 10482, "pf": 16680, "pf</w>": 12572, "pfa</w>": 47839, "pfc</w>": 35007, "pff</w>": 44121, "pfi": 29810, "pfw</w>": 31229, "pg": 12476, "pg</w>": 5211, "pga</w>": 13351, "pgat": 36514, "pgatour</w>": 40094, "pgh": 44862, "pgh</w>": 30031, "pgs</w>": 49204, "ph": 745, "ph</w>": 2042, "pha": 4443, "pha</w>": 26255, "phal": 19962, "phan": 8731, "phan</w>": 40126, "phant": 36998, "phantom": 37688, "phantom</w>": 14490, "phar": 5570, "phara": 35792, "pharaoh</w>": 40437, "pharm</w>": 45761, "pharma</w>": 17831, "pharmac": 8193, "pharmaceu": 19490, "pharmaceutical</w>": 25217, "pharmaceuticals</w>": 44623, "pharmacist</w>": 41024, "pharmacists</w>": 44337, "pharmacy</w>": 15293, "pharo": 42308, "pharoah</w>": 49287, "pharrell</w>": 31316, "phase</w>": 8304, "phases</w>": 35337, "phat</w>": 42492, "phc</w>": 41102, "phd": 20875, "phd</w>": 8472, "phdchat</w>": 39564, "phdlife</w>": 39638, "phe": 4787, "phe</w>": 19853, "pheasant</w>": 41983, "phee</w>": 41292, "phel": 23711, "phelps</w>": 27128, "phen": 7718, "pheno": 47336, "phenom": 31673, "phenom</w>": 39618, "phenomen": 11304, "phenomena</w>": 41538, "phenomenal</w>": 15035, "phenomenon</w>": 24464, "pher": 9194, "pher</w>": 19828, "phers</w>": 29531, "pherson</w>": 36421, "phew</w>": 10295, "phi": 2239, "phi</w>": 12220, "phia</w>": 9228, "phic</w>": 3977, "phie</w>": 30237, "phies</w>": 17062, "phil": 2821, "phil</w>": 6199, "phila</w>": 47443, "philadel": 9428, "philadelphia</w>": 9749, "philanthro": 16587, "philanthropist</w>": 44153, "philanthropy</w>": 25047, "philately</w>": 33695, "phile</w>": 36543, "philharmon": 25228, "philharmonic</w>": 31699, "phili": 4277, "philia</w>": 46654, "philip": 20748, "philip</w>": 11074, "philipp": 5623, "philipp</w>": 47591, "philippe</w>": 20942, "philippine</w>": 17629, "philippines</w>": 8149, "philips</w>": 25175, "phill": 42346, "phill</w>": 48272, "philli": 6456, "phillies</w>": 18748, "phillip": 48832, "phillip</w>": 19323, "phillips</w>": 11041, "philly": 19545, "philly</w>": 7785, "philos": 8395, "philosop": 20349, "philosoph": 10187, "philosopher</w>": 25220, "philosophical</w>": 32628, "philosophy</w>": 12213, "phils</w>": 38573, "phin</w>": 33816, "phine</w>": 40985, "phins</w>": 40210, "phish</w>": 36897, "phishing</w>": 36546, "phl</w>": 25603, "pho": 816, "pho</w>": 22707, "phobia</w>": 28749, "phoe": 22673, "phoebe</w>": 27582, "phoeni": 6778, "phoenix": 20615, "phoenix</w>": 7793, "phol</w>": 48140, "phon": 19602, "phon</w>": 31115, "phone": 15486, "phone</w>": 1951, "phones</w>": 6351, "phony</w>": 31925, "phora</w>": 31363, "phosp": 22638, "photo": 1153, "photo</w>": 1125, "photobomb</w>": 37075, "photobook</w>": 41894, "photog</w>": 28115, "photogenic</w>": 36108, "photogra": 36754, "photograph": 1688, "photograph</w>": 8853, "photographed</w>": 11573, "photographer</w>": 5748, "photographers</w>": 17141, "photographic</w>": 22053, "photographing</w>": 30074, "photographs</w>": 15759, "photography": 33183, "photography</w>": 2108, "photom": 32223, "photoo": 11106, "photooftheday</w>": 11933, "photos</w>": 2479, "photoshoot</w>": 11121, "photoshop</w>": 12419, "photoshopped</w>": 
35738, "phouse</w>": 27848, "php</w>": 17370, "phra": 12777, "phrase</w>": 18809, "phrases</w>": 35264, "phs</w>": 16495, "phu": 21274, "phuket</w>": 34028, "phx": 35466, "phx</w>": 29507, "phy": 6484, "phy</w>": 4292, "phyl": 35600, "phyllis</w>": 37844, "phys": 3734, "phys</w>": 37894, "physi": 13782, "physic": 46641, "physical": 44127, "physical</w>": 6671, "physically</w>": 18105, "physician</w>": 21055, "physicians</w>": 26702, "physicist</w>": 29052, "physics</w>": 9369, "physio": 29574, "physio</w>": 29177, "physiology</w>": 32349, "physique</w>": 42884, "phyto": 42197, "pi": 741, "pi</w>": 5357, "pia</w>": 8918, "pian</w>": 24637, "pianist</w>": 21048, "piano": 49278, "piano</w>": 7894, "pianos</w>": 47904, "piazza</w>": 28496, "pic": 901, "pic</w>": 1282, "pical</w>": 5482, "picard</w>": 48507, "picasso</w>": 21481, "piccad": 33876, "piccadilly</w>": 37287, "piccollage</w>": 43621, "pick": 6379, "pick</w>": 3142, "picked</w>": 6018, "picker</w>": 43105, "pickering</w>": 47605, "picket</w>": 33559, "picking</w>": 9545, "pickle</w>": 24570, "pickled</w>": 21705, "pickles</w>": 25001, "picks</w>": 8551, "pickup</w>": 15382, "pickups</w>": 33383, "picnic</w>": 12007, "pico</w>": 23363, "picoftheday</w>": 18319, "pics</w>": 2559, "pict</w>": 18778, "pictorial</w>": 40640, "picture": 11663, "picture</w>": 1674, "pictured</w>": 7647, "pictures</w>": 3646, "picturesque</w>": 24894, "pid</w>": 5225, "piday</w>": 48056, "pie": 12065, "pie</w>": 5319, "piece": 39632, "piece</w>": 2754, "pieces</w>": 6194, "pied": 24686, "pied</w>": 12713, "piedmont</w>": 39691, "pier": 5641, "pier</w>": 11348, "pierc": 49216, "pierce": 48462, "pierce</w>": 16782, "pierced</w>": 32799, "piercing</w>": 22557, "piero</w>": 43125, "pierre": 34670, "pierre</w>": 11985, "piers</w>": 29030, "pies</w>": 6898, "pieter</w>": 44801, "pietro</w>": 42169, "piff</w>": 40719, "pig": 12009, "pig</w>": 9619, "pigeon</w>": 18008, "pigeons</w>": 32910, "piggy</w>": 28245, "pigment</w>": 40284, "pigs</w>": 16228, "pik</w>": 48539, "pika</w>": 47372, "pikach": 27268, "pikachu</w>": 28107, "pike": 33457, "pike</w>": 14011, "pil": 2893, "pil</w>": 20645, "pilates</w>": 29518, "pile": 44403, "pile</w>": 13930, "piled</w>": 26873, "piles</w>": 31968, "pilgri": 13966, "pilgrim</w>": 32662, "pilgrimage</w>": 24335, "pilgrims</w>": 31370, "piling</w>": 43050, "pilip": 27234, "pilipinas</w>": 32392, "pill": 14830, "pill</w>": 19226, "pillar</w>": 17322, "pillars</w>": 22054, "pillow": 42237, "pillow</w>": 12182, "pillows</w>": 26499, "pills</w>": 23964, "pilo": 37526, "pilot": 31619, "pilot</w>": 6687, "pilots</w>": 15586, "pilsner</w>": 47153, "pim": 15285, "pim</w>": 35472, "pimp": 35789, "pin": 2629, "pin</w>": 5164, "pinball</w>": 31679, "pinch</w>": 26114, "pine": 9398, "pine</w>": 7374, "pineapple</w>": 14831, "pines</w>": 20338, "ping": 23720, "ping</w>": 2089, "pinion</w>": 40557, "pink": 11151, "pink</w>": 3360, "pinkfloyd</w>": 48520, "pinky</w>": 29803, "pinn": 31448, "pinnacle</w>": 32754, "pinned</w>": 12165, "pinning</w>": 44515, "pino</w>": 36633, "pinot": 41399, "pinot</w>": 21146, "pinoy": 43578, "pinoy</w>": 35258, "pins</w>": 14619, "pinst": 41173, "pint": 42537, "pint</w>": 13584, "pinterest</w>": 15379, "pinto</w>": 35992, "pints</w>": 27935, "pinup</w>": 37349, "pio</w>": 22108, "pion": 36728, "pion</w>": 29190, "pione": 7975, "pioneer": 34892, "pioneer</w>": 12459, "pioneering</w>": 25933, "pioneers</w>": 22383, "pious</w>": 42441, "pip</w>": 30854, "pipe": 29333, "pipe</w>": 10459, "pipel": 12387, "pipeline</w>": 
14151, "pipelines</w>": 39683, "piper": 47052, "piper</w>": 16293, "pipes</w>": 16991, "piping</w>": 40744, "pippa</w>": 47672, "pir": 4351, "pir</w>": 38899, "piracy</w>": 39452, "piran": 49034, "pirate": 38680, "pirate</w>": 13592, "pirates</w>": 10442, "pire": 16613, "pires</w>": 14988, "pis": 9230, "pis</w>": 44441, "pisa</w>": 43632, "pisces</w>": 45982, "piss</w>": 20818, "pissed</w>": 17989, "pist": 15556, "pist</w>": 32826, "pistachi": 29760, "pistachio</w>": 36320, "pistol</w>": 20480, "piston</w>": 48236, "pistons</w>": 27242, "pistor": 48162, "pit": 2946, "pit</w>": 7476, "pita</w>": 27070, "pitbull</w>": 25295, "pitch": 8992, "pitch</w>": 5872, "pitched</w>": 28447, "pitcher</w>": 13445, "pitchers</w>": 27835, "pitches</w>": 21005, "pitching</w>": 16455, "piti": 47568, "pits</w>": 24144, "pitt": 7607, "pitt</w>": 15599, "pitts": 9531, "pittsburgh</w>": 10453, "pity</w>": 24380, "pius</w>": 39988, "pivo": 18009, "pivot</w>": 31805, "pivotal</w>": 31432, "pix": 6185, "pix</w>": 13088, "pixar</w>": 27493, "pixel": 14384, "pixel</w>": 13241, "pixelart</w>": 18516, "pixels</w>": 34099, "pixie</w>": 35573, "piyu": 30772, "piyush": 36191, "piyushgoyal</w>": 45318, "pizz": 3897, "pizza</w>": 4474, "pizzas</w>": 30647, "pizzeria</w>": 44174, "pj": 12524, "pj</w>": 17179, "pjnet</w>": 22011, "pjs</w>": 36009, "pk": 10149, "pk</w>": 10991, "pkg</w>": 49011, "pkk</w>": 47480, "pknot</w>": 41779, "pkwy</w>": 36827, "pl": 712, "pl</w>": 5678, "pla": 841, "pla</w>": 19945, "plac": 2331, "place": 14884, "place</w>": 1445, "placed</w>": 9729, "placement</w>": 16724, "placements</w>": 43885, "placer</w>": 49170, "places</w>": 4448, "placing</w>": 18531, "plague</w>": 25360, "plaid</w>": 23291, "plain": 22776, "plain</w>": 10709, "plains</w>": 16345, "plan": 1740, "plan</w>": 2970, "pland</w>": 24801, "plane": 22728, "plane</w>": 5363, "planes</w>": 12581, "planet": 16833, "planet</w>": 5172, "planetary</w>": 28361, "planets</w>": 22315, "plank</w>": 30991, "plankton</w>": 48249, "plann": 6409, "planned</w>": 8169, "planner</w>": 18083, "planners</w>": 33664, "planning</w>": 4446, "plano</w>": 34063, "plans</w>": 4181, "plant": 8521, "plant</w>": 3912, "plantation</w>": 20014, "plantbased</w>": 33720, "planted</w>": 14286, "planter</w>": 34453, "planters</w>": 43661, "planting</w>": 13922, "plants</w>": 5829, "plaque</w>": 16097, "plaques</w>": 45610, "plar</w>": 26754, "plas": 45673, "plasma</w>": 24999, "plaster</w>": 31980, "plastic": 15645, "plastic</w>": 6102, "plasticpollution</w>": 47129, "plastics</w>": 20999, "plasticsurgery</w>": 48555, "plat": 3172, "plata</w>": 46456, "plate": 28744, "plate</w>": 5135, "plateau</w>": 29301, "plated</w>": 21161, "plates</w>": 11485, "platform</w>": 5549, "platforms</w>": 13551, "platin": 10267, "plating</w>": 44564, "platinum</w>": 10979, "plato</w>": 41101, "platoon</w>": 41254, "platt": 44459, "platt</w>": 40097, "platte</w>": 46785, "platter</w>": 29071, "platz</w>": 40878, "plau": 39139, "play": 1222, "play</w>": 1453, "playa</w>": 23756, "playable</w>": 33885, "playback</w>": 39194, "playbook</w>": 34856, "playboy</w>": 24383, "played</w>": 3432, "player": 24503, "player</w>": 2477, "players</w>": 3030, "playful</w>": 23871, "playground</w>": 15861, "playhouse</w>": 23254, "playin</w>": 24674, "playing": 47368, "playing</w>": 1629, "playlist</w>": 9180, "playlists</w>": 47183, "playo": 5804, "playoff</w>": 9655, "playoffs</w>": 9548, "plays</w>": 5134, "playstation</w>": 11332, "playtime</w>": 43037, "playwright</w>": 32070, "plaza</w>": 8943, 
"plc</w>": 16827, "ple": 926, "ple</w>": 1619, "plea</w>": 21956, "plead</w>": 47539, "pleads</w>": 31425, "plear": 21362, "pleas": 8481, "pleas</w>": 48740, "pleasant</w>": 12271, "please": 41074, "please</w>": 1474, "pleased</w>": 6107, "pleasing</w>": 32893, "pleasure</w>": 5854, "pleasures</w>": 29513, "pledge</w>": 11507, "pledged</w>": 36799, "pledges</w>": 26746, "pledis</w>": 41202, "plein</w>": 43429, "plenary</w>": 19891, "plenty</w>": 7524, "pler</w>": 17677, "ples</w>": 6248, "pless": 39821, "pless</w>": 17059, "plets</w>": 43230, "plex": 23765, "plex</w>": 15241, "pley</w>": 19543, "pli": 30001, "pli</w>": 45797, "plic": 5806, "plicity</w>": 19823, "plight</w>": 40317, "plin": 44531, "plin</w>": 32335, "pline</w>": 25376, "pling</w>": 12899, "plings</w>": 31184, "pll": 47629, "pll</w>": 25266, "pln</w>": 48755, "plo": 1778, "plo</w>": 43523, "plor": 34695, "plot</w>": 9918, "plots</w>": 25672, "plotting</w>": 30751, "plough": 33811, "plow</w>": 38363, "pls</w>": 5572, "plu": 2052, "plug</w>": 12628, "plugged</w>": 23261, "plugin</w>": 31278, "plugins</w>": 48797, "plugs</w>": 28083, "plum": 26267, "plum</w>": 16202, "plumb": 21769, "plumber</w>": 43478, "plumbing</w>": 24647, "plume</w>": 39495, "plun": 15122, "plunge</w>": 26506, "plur": 44664, "plus</w>": 3097, "plush</w>": 18926, "pluto</w>": 26380, "ply": 17249, "ply</w>": 28705, "plying</w>": 36071, "plym": 11907, "plymouth</w>": 13786, "plz</w>": 10538, "pm": 13699, "pm</w>": 990, "pmi</w>": 41206, "pmln</w>": 23208, "pmo": 18782, "pmoindia</w>": 20374, "pms</w>": 44223, "pn": 14431, "pn</w>": 13774, "pnc</w>": 37148, "pne": 30966, "pneu": 28714, "pneumonia</w>": 42906, "png</w>": 20992, "pnp</w>": 25972, "pnpp": 42175, "pnw</w>": 31521, "po": 628, "po</w>": 3057, "poa</w>": 43912, "poached</w>": 27665, "poaching</w>": 35140, "poc": 13232, "poc</w>": 27780, "pocaly": 37987, "pocalypse</w>": 42307, "poche": 38336, "poche</w>": 39022, "pocket": 29147, "pocket</w>": 8504, "pockets</w>": 19566, "pocon": 41850, "pod": 3583, "pod</w>": 7446, "podcast": 39654, "podcast</w>": 4294, "podcasting</w>": 40106, "podcasts</w>": 19392, "pode": 33368, "poder": 24960, "podernfamily</w>": 26620, "podi": 32853, "podium</w>": 14093, "pods</w>": 18776, "poe": 4746, "poe</w>": 19254, "poem</w>": 9436, "poems</w>": 15577, "poet": 41019, "poet</w>": 9872, "poetic</w>": 26365, "poetry": 20192, "poetry</w>": 6038, "poetryday</w>": 39255, "poets</w>": 19804, "pof": 40850, "poff</w>": 28236, "pogba</w>": 25998, "poign": 29682, "poignant</w>": 32138, "poin": 9074, "point": 13280, "point</w>": 2301, "pointe</w>": 24631, "pointed</w>": 20703, "pointer</w>": 29883, "pointers</w>": 36760, "pointing</w>": 19233, "pointless</w>": 33586, "points</w>": 3396, "pois": 17008, "poise</w>": 45087, "poised</w>": 27354, "poison": 30722, "poison</w>": 17074, "poisoned</w>": 43624, "poisoning</w>": 25750, "poisonous</w>": 37131, "pok": 15387, "poke": 6892, "poke</w>": 23186, "pokemon": 16239, "pokemon</w>": 9528, "pokemongo</w>": 23985, "poker": 30735, "poker</w>": 11865, "pokes</w>": 40221, "poking</w>": 49169, "poké": 20656, "pokémon</w>": 22066, "pol": 977, "pol</w>": 7649, "pola</w>": 43876, "poland</w>": 9834, "polar": 21432, "polar</w>": 12214, "polari": 27919, "polaris</w>": 37965, "polarized</w>": 48437, "polaro": 25237, "polaroid</w>": 30427, "poldark</w>": 41322, "pole": 26682, "pole</w>": 8170, "poles</w>": 22585, "poli": 9675, "poli</w>": 5414, "polic": 16126, "police": 15535, "police</w>": 2120, "policeman</w>": 37713, "policemen</w>": 47946, "polici": 
10819, "policies</w>": 10993, "policing</w>": 20969, "policy": 30173, "policy</w>": 4660, "polio</w>": 30533, "polis</w>": 16133, "polish": 46941, "polish</w>": 9632, "polished</w>": 21478, "polishing</w>": 43629, "polit": 2247, "politan</w>": 15337, "polite</w>": 31497, "politi": 40597, "politic": 33333, "political": 37744, "political</w>": 4197, "politically</w>": 24323, "politician</w>": 15960, "politicians</w>": 12914, "politico</w>": 39403, "politics</w>": 4929, "polk</w>": 33317, "polka</w>": 29476, "poll</w>": 7032, "pollen</w>": 27651, "pollin": 19152, "pollinators</w>": 36599, "polling</w>": 18024, "pollo</w>": 42755, "pollock</w>": 37614, "polls</w>": 11813, "pollu": 8370, "polluted</w>": 43346, "pollution</w>": 10384, "polly</w>": 31204, "polo": 35928, "polo</w>": 10229, "poly": 6833, "poly</w>": 18367, "polye": 31730, "polyester</w>": 38514, "polym": 23626, "polymer</w>": 29993, "polyne": 38892, "polyvore</w>": 24771, "pom": 7548, "pom</w>": 24280, "pome": 27963, "pomegran": 29326, "pomegranate</w>": 32415, "pomer": 35156, "pomona</w>": 41690, "pompe": 18352, "pompeii</w>": 47775, "pompeo</w>": 34351, "pompey</w>": 35079, "pon": 3809, "pon</w>": 22391, "ponce</w>": 43637, "pond</w>": 10750, "ponder</w>": 36863, "pondering</w>": 47395, "ponds</w>": 31033, "pone</w>": 32183, "pong": 40546, "pong</w>": 17710, "ponies</w>": 34157, "pons</w>": 41255, "pont": 47563, "pont</w>": 22997, "ponte</w>": 40892, "ponti": 15527, "pontiac</w>": 25373, "pontifex</w>": 33566, "ponty": 45152, "pony": 24438, "pony</w>": 12678, "ponytail</w>": 43265, "poo": 6601, "poo</w>": 14389, "pooch</w>": 37037, "poodle</w>": 34961, "pooh</w>": 27103, "pooja</w>": 35676, "pool": 12484, "pool</w>": 2831, "poole</w>": 26290, "pools</w>": 18736, "poolside</w>": 35509, "poon": 33799, "poon</w>": 36178, "poop</w>": 23310, "poor": 14528, "poor</w>": 3665, "poorest</w>": 40771, "poorly</w>": 21101, "pop": 6530, "pop</w>": 2852, "popart</w>": 47425, "popcorn</w>": 15034, "pope": 16994, "pope</w>": 9283, "popefrancis</w>": 37254, "poplar</w>": 38726, "popo": 38835, "popo</w>": 35572, "popp": 13156, "popped</w>": 14934, "poppies</w>": 30385, "poppin</w>": 28536, "popping</w>": 18152, "poppins</w>": 41216, "poppy": 32194, "poppy</w>": 15447, "pops</w>": 11705, "popsic": 38481, "popu": 3785, "popul": 6593, "popular": 15854, "popular</w>": 4368, "popularity</w>": 19235, "populated</w>": 38420, "population</w>": 8423, "populations</w>": 23797, "populism</w>": 48998, "populist</w>": 49376, "popup</w>": 33053, "por": 817, "por</w>": 7697, "pora</w>": 23537, "porcel": 19409, "porcelain</w>": 20451, "porch</w>": 17154, "pore</w>": 28267, "pork": 40379, "pork</w>": 7897, "poro": 48110, "porridge</w>": 34924, "porsch": 48009, "porsche": 44049, "porsche</w>": 8783, "port": 1641, "port</w>": 1418, "porta</w>": 45037, "portable</w>": 11949, "portage</w>": 32087, "portal</w>": 14982, "porte</w>": 28654, "ported</w>": 16879, "porter": 28319, "porter</w>": 10318, "porters</w>": 15670, "portfoli": 45766, "portfolio</w>": 11938, "porth": 37425, "porti": 45760, "porting</w>": 26052, "portion</w>": 13739, "portions</w>": 22914, "portland": 38366, "portland</w>": 8880, "portman</w>": 34755, "porto": 24853, "porto</w>": 18947, "portobello</w>": 48025, "portra": 4175, "portrait": 39312, "portrait</w>": 5352, "portraits</w>": 14203, "portray</w>": 46282, "portrayal</w>": 39238, "portrayed</w>": 36093, "ports</w>": 7734, "portsm": 17063, "portsmouth</w>": 19074, "portu": 7159, "portugal</w>": 9503, "portugue": 17498, "portuguese</w>": 18019, 
"pos": 1780, "pos</w>": 11839, "pose": 25478, "pose</w>": 4230, "posed</w>": 5206, "posei": 47270, "poser</w>": 46899, "poses</w>": 9773, "posey</w>": 34852, "posh</w>": 26748, "posing</w>": 10518, "posit": 28793, "positi": 7895, "position</w>": 4657, "positioned</w>": 34482, "positioning</w>": 30657, "positions</w>": 12188, "positive": 21811, "positive</w>": 4844, "positively</w>": 24688, "positivity</w>": 19966, "poss</w>": 39745, "posse": 17414, "posse</w>": 28413, "possess</w>": 36810, "possessed</w>": 36220, "possession</w>": 16154, "possessions</w>": 40588, "possi": 2521, "possibilities</w>": 17932, "possibility</w>": 18517, "possible</w>": 3134, "possibly</w>": 8601, "possum</w>": 38575, "post": 3489, "post</w>": 1549, "postage</w>": 27570, "postal</w>": 21687, "postcard</w>": 14785, "postcards</w>": 23922, "postdoc</w>": 41013, "posted</w>": 4752, "poster": 22881, "poster</w>": 3574, "posters</w>": 9673, "postgame</w>": 34873, "postgraduate</w>": 31997, "posthum": 42410, "posting</w>": 7559, "postman</w>": 38285, "postpon": 23247, "postponed</w>": 25097, "posts</w>": 7824, "postseason</w>": 24521, "posture</w>": 29681, "posure</w>": 35539, "pot": 3547, "pot</w>": 5168, "potam": 45825, "potassi": 36889, "potassium</w>": 37147, "potat": 5975, "potato</w>": 8527, "potatoes</w>": 11567, "potd</w>": 28765, "pote</w>": 41869, "poten": 4454, "potent</w>": 26082, "potenti": 44104, "potential</w>": 5100, "potentially</w>": 16508, "potholes</w>": 47506, "potion</w>": 46055, "potom": 38848, "potomac</w>": 43372, "pots</w>": 19234, "pott": 28698, "potted</w>": 48581, "potter": 24975, "potter</w>": 9026, "pottery</w>": 18396, "potts</w>": 39839, "potty</w>": 43569, "potus</w>": 8740, "pou": 9423, "pouch</w>": 26811, "poul": 22485, "poultry</w>": 31005, "poun": 33719, "pound": 33809, "pound</w>": 10674, "pounding</w>": 46544, "pounds</w>": 10752, "pour": 33112, "pour</w>": 8180, "poured</w>": 26621, "pouring</w>": 16098, "pours</w>": 26005, "pout</w>": 39621, "poutine</w>": 43768, "pov</w>": 25731, "pover": 8432, "pover</w>": 29464, "poverty</w>": 9095, "pow": 1317, "pow</w>": 17745, "powder": 32427, "powder</w>": 9674, "powe": 36955, "powell</w>": 13305, "power": 2789, "power</w>": 1807, "powerball</w>": 47803, "powered": 45442, "powered</w>": 7332, "powerful</w>": 4875, "powerhouse</w>": 22858, "powering</w>": 16231, "powerof": 31961, "powerpoint</w>": 38940, "powerrangers</w>": 40620, "powers</w>": 9422, "pox</w>": 43649, "poy</w>": 34737, "poyn": 47655, "poz": 39953, "pp": 604, "pp</w>": 4186, "ppa</w>": 10416, "ppard</w>": 23391, "ppc</w>": 27778, "ppe": 24573, "ppe</w>": 11867, "pped</w>": 1873, "ppel</w>": 46523, "ppen</w>": 30663, "pper": 6719, "pper</w>": 2440, "ppers</w>": 5232, "ppery</w>": 27833, "ppet</w>": 20744, "ppets</w>": 25849, "ppg</w>": 27433, "ppi</w>": 9594, "ppie</w>": 33795, "ppin</w>": 8076, "pping": 22214, "pping</w>": 1682, "ppings</w>": 35687, "ppl</w>": 6758, "pple</w>": 12302, "ppm</w>": 42053, "ppo</w>": 10215, "ppor": 37613, "ppp</w>": 14017, "pps</w>": 10683, "ppv</w>": 38864, "ppy": 30360, "ppy</w>": 3860, "pr": 766, "pr</w>": 4150, "pra": 1865, "pra</w>": 19285, "prab": 17901, "prabhas</w>": 29959, "prabhu</w>": 31529, "prac": 2243, "practi": 29995, "practic": 5495, "practical</w>": 10792, "practically</w>": 25588, "practice</w>": 3349, "practiced</w>": 36749, "practices</w>": 9040, "practicing</w>": 12750, "practise</w>": 38938, "practising</w>": 36478, "practiti": 19909, "practitioner</w>": 32591, "practitioners</w>": 29045, "prada</w>": 29456, 
"pradesh</w>": 15384, "prado</w>": 44141, "prag": 31025, "prague</w>": 14940, "prairi": 12629, "prairie</w>": 14753, "praise</w>": 10013, "praised</w>": 27649, "praises</w>": 23049, "praising</w>": 36961, "prakash": 43708, "prakash</w>": 25366, "pram": 47774, "pran": 20048, "prank</w>": 23654, "pras</w>": 41562, "prasad</w>": 29562, "prat": 23069, "prati": 45773, "pratt": 37863, "pratt</w>": 23396, "prawn</w>": 33102, "prawns</w>": 34903, "pray": 12671, "pray</w>": 6041, "prayed</w>": 34665, "prayer": 41452, "prayer</w>": 6583, "prayers</w>": 8393, "prayfor": 18443, "praying</w>": 11550, "prays</w>": 46602, "prc</w>": 28781, "pre": 679, "pre</w>": 2900, "preach</w>": 22545, "preacher</w>": 29357, "preaching</w>": 23642, "precau": 36532, "precautions</w>": 47845, "prece": 15361, "preci": 5470, "precin": 27908, "precinct</w>": 32587, "precious</w>": 8226, "precipit": 27463, "precipitation</w>": 33399, "precise</w>": 24457, "precisely</w>": 34954, "precision": 44021, "precision</w>": 15621, "pred": 40370, "predat": 13364, "predator</w>": 20653, "predators</w>": 25569, "prede": 38454, "predecess": 38963, "predic": 4876, "predict</w>": 16900, "predictable</w>": 25344, "predicted</w>": 18702, "predicting</w>": 30414, "prediction</w>": 16296, "predictions</w>": 15125, "predictive</w>": 29798, "predicts</w>": 25960, "preds</w>": 40125, "pree</w>": 47026, "preet</w>": 30131, "prefe": 14542, "prefecture</w>": 32890, "prefer": 33426, "prefer</w>": 11450, "preference</w>": 35057, "preferences</w>": 38118, "preferred</w>": 18772, "prefers</w>": 38528, "pregame</w>": 18575, "pregn": 7190, "pregnancy</w>": 12769, "pregnant</w>": 11195, "prehistoric</w>": 32750, "prejudice</w>": 28337, "preli": 15523, "prelimin": 19990, "preliminary</w>": 20997, "prelims</w>": 43223, "prelude</w>": 42966, "prem": 32090, "prem</w>": 21724, "premature</w>": 39253, "premi": 2413, "premier": 16996, "premier</w>": 5539, "premiere</w>": 5367, "premiered</w>": 27652, "premieres</w>": 19907, "premiering</w>": 32615, "premierleague</w>": 22608, "premiers</w>": 44883, "premiership</w>": 23665, "premiosm": 38460, "premiosmtvmiaw</w>": 38630, "premise</w>": 45952, "premises</w>": 27266, "premium</w>": 8011, "pren": 20801, "preneur</w>": 46288, "preorder</w>": 16703, "preorders</w>": 45985, "prep": 6430, "prep</w>": 7277, "prepa": 26270, "prepaid</w>": 42934, "prepar": 4968, "preparation</w>": 11651, "preparations</w>": 19135, "prepare</w>": 7014, "prepared</w>": 7677, "preparedness</w>": 29492, "prepares</w>": 16375, "preparing</w>": 7365, "prepped</w>": 34379, "prepping</w>": 16459, "preps</w>": 14765, "prequel</w>": 40461, "pres": 1385, "pres</w>": 8529, "presale</w>": 27135, "presby": 30447, "presbyter": 33959, "presbyterian</w>": 35370, "preschool</w>": 24354, "prescott</w>": 29392, "prescri": 14851, "prescribed</w>": 36968, "prescription</w>": 23061, "preseason</w>": 13813, "presen": 16742, "presence</w>": 8848, "present": 2344, "present</w>": 2881, "presentation</w>": 4594, "presentations</w>": 16998, "presented</w>": 4587, "presenter</w>": 18587, "presenters</w>": 32759, "presenting</w>": 5339, "presents</w>": 4215, "preserv": 17616, "preservation</w>": 21074, "preserve</w>": 15570, "preserved</w>": 23161, "preserves</w>": 44881, "preserving</w>": 32315, "presi": 1697, "presiden": 43374, "presidency</w>": 18077, "president": 19900, "president</w>": 1940, "presidente</w>": 47363, "presidenti": 48297, "presidential</w>": 8503, "presidents</w>": 16726, "presiding</w>": 45298, "presley</w>": 30013, "press": 4124, "press</w>": 
2124, "pressed</w>": 20080, "presser</w>": 27826, "presses</w>": 33748, "pressing</w>": 20893, "pressure</w>": 6083, "pressures</w>": 38487, "prest": 41840, "presti": 12245, "prestige</w>": 29328, "prestigious</w>": 15888, "presto</w>": 42211, "preston": 37335, "preston</w>": 15179, "presu": 21667, "presumably</w>": 42562, "pret": 9652, "preten": 15871, "pretend</w>": 18111, "pretending</w>": 21306, "pretoria</w>": 36080, "prett": 46667, "prettier</w>": 31745, "prettiest</w>": 22866, "pretty": 18286, "pretty</w>": 2111, "pretz": 24890, "pretzel</w>": 36707, "pretzels</w>": 45468, "prev": 20274, "prevail</w>": 31637, "prevalence</w>": 41729, "prevalent</w>": 46260, "preven": 29382, "prevent": 26436, "prevent</w>": 7968, "preventable</w>": 44250, "prevented</w>": 35356, "preventing</w>": 21756, "prevention</w>": 9500, "preventive</w>": 40949, "prevents</w>": 31746, "preview</w>": 4449, "previews</w>": 20279, "previous</w>": 9252, "previously</w>": 13359, "prey</w>": 17131, "prez</w>": 17956, "pri": 955, "pri</w>": 23400, "pric": 24275, "price": 13254, "price</w>": 2827, "priced</w>": 16934, "priceless</w>": 15743, "prices</w>": 5954, "pricing</w>": 14800, "prick": 43921, "prick</w>": 46516, "pride": 15323, "pride</w>": 3436, "pridemonth</w>": 41410, "prie": 22477, "priest": 38756, "priest</w>": 14222, "priests</w>": 30005, "prim": 22004, "prima": 35611, "prima</w>": 33277, "primal</w>": 36604, "primar": 21579, "primaries</w>": 46126, "primarily</w>": 29465, "primark</w>": 48329, "primary": 35024, "primary</w>": 5814, "primavera</w>": 44899, "prime": 14162, "prime</w>": 5183, "primed</w>": 45694, "primer</w>": 22388, "primetime</w>": 29763, "primitive</w>": 37467, "primo</w>": 43215, "primrose</w>": 45891, "prin": 1588, "prince": 9457, "prince</w>": 4735, "princes": 45329, "princes</w>": 30136, "princess": 24123, "princess</w>": 5079, "princesses</w>": 34161, "princeton</w>": 22433, "princi": 5129, "principal": 33599, "principal</w>": 8860, "principals</w>": 27524, "principle</w>": 19595, "principles</w>": 13755, "print": 17851, "print</w>": 3557, "printable</w>": 29648, "printed</w>": 7978, "printer</w>": 14521, "printers</w>": 27881, "printing</w>": 7369, "printmaking</w>": 38669, "prints</w>": 7704, "prior": 20328, "prior</w>": 10572, "priorit": 47773, "prioriti": 28822, "priorities</w>": 15232, "prioritize</w>": 46715, "priority</w>": 12451, "priory</w>": 38665, "prisc": 32468, "priscilla</w>": 42396, "prise</w>": 23343, "prism": 49311, "prism</w>": 34356, "prison": 9281, "prison</w>": 6622, "prisoner</w>": 21427, "prisoners</w>": 17460, "prisons</w>": 26607, "pristine</w>": 30618, "prit": 41668, "prit</w>": 37523, "prith": 39173, "prius</w>": 43561, "priv": 3270, "privacy</w>": 10437, "private": 20362, "private</w>": 4439, "privately</w>": 32970, "privati": 27379, "privi": 8367, "privileg": 18015, "privilege</w>": 11537, "privileged</w>": 18166, "prix</w>": 10875, "priya</w>": 31275, "priyan": 16488, "priyanka</w>": 31959, "priyankach": 30030, "priyankachopra</w>": 30264, "prize": 48222, "prize</w>": 4521, "prized</w>": 38769, "prizes</w>": 9268, "prk</w>": 37094, "pro": 644, "pro</w>": 2630, "proactive</w>": 33364, "prob": 17706, "prob</w>": 24007, "probab": 3907, "probability</w>": 32637, "probable</w>": 42444, "probably</w>": 4047, "probation</w>": 36531, "probe</w>": 14359, "probes</w>": 48564, "probiotics</w>": 49395, "proble": 2719, "problem</w>": 4324, "problematic</w>": 33767, "problems</w>": 4671, "probs</w>": 16330, "probz</w>": 34243, "proc": 38417, "proce": 4076, "procedu": 
18204, "procedural</w>": 48177, "procedure</w>": 20163, "procedures</w>": 21109, "proceed": 26664, "proceed</w>": 33894, "proceedings</w>": 26953, "proceeds</w>": 11882, "process": 17291, "process</w>": 4078, "processed</w>": 23816, "processes</w>": 15169, "processing</w>": 11737, "procession</w>": 26288, "processor</w>": 22838, "processors</w>": 43634, "proclaimed</w>": 34489, "proclamation</w>": 32065, "procra": 25361, "procrastin": 25586, "procrastination</w>": 42825, "procreate</w>": 39336, "proctor</w>": 47204, "procu": 21001, "procurement</w>": 23733, "prod": 44349, "prod</w>": 11991, "prodi": 27759, "prodigy</w>": 31973, "produ": 27852, "produc": 1471, "produce</w>": 7529, "produced</w>": 7479, "producer</w>": 7064, "producers</w>": 13883, "produces</w>": 19940, "producing</w>": 13579, "product": 32602, "product</w>": 4306, "production</w>": 4146, "productions</w>": 14166, "productive</w>": 9697, "productivity</w>": 12800, "products</w>": 3964, "prof": 15043, "prof</w>": 5488, "profe": 2611, "profess": 5486, "professi": 3705, "profession": 8104, "profession</w>": 19671, "professional": 46007, "professional</w>": 4774, "professionalism</w>": 41252, "professionally</w>": 33892, "professionals</w>": 10165, "professor": 47302, "professor</w>": 6092, "professors</w>": 27758, "profici": 34685, "profile": 14291, "profile</w>": 6444, "profiles</w>": 22070, "profiling</w>": 37123, "profit": 16941, "profit</w>": 7909, "profitable</w>": 25465, "profits</w>": 13410, "profound": 48245, "profound</w>": 22998, "profs</w>": 19260, "prog</w>": 22219, "progno": 46070, "program": 4162, "program</w>": 2737, "programme</w>": 6322, "programmer</w>": 37001, "programmes</w>": 20468, "programming</w>": 10831, "programs</w>": 7345, "progre": 7069, "progress</w>": 4421, "progressi": 23297, "progressing</w>": 32346, "progression</w>": 24772, "progressive</w>": 12208, "progressives</w>": 41709, "prohi": 41124, "prohib": 45040, "prohibition</w>": 34440, "proj</w>": 39156, "proje": 48345, "projec": 1610, "project": 15911, "project</w>": 1965, "projected</w>": 22873, "projection</w>": 22384, "projections</w>": 34638, "projector</w>": 27816, "projects</w>": 5090, "proli": 19710, "prolife</w>": 32126, "prolifer": 39018, "prolific</w>": 27839, "prolly</w>": 45968, "prolon": 35379, "prolonged</w>": 41972, "prom": 40363, "prom</w>": 7944, "prome": 34355, "promen": 33578, "promenade</w>": 35522, "promethe": 44183, "promin": 35217, "prominent</w>": 19172, "promis": 3963, "promise</w>": 6745, "promised</w>": 11516, "promises</w>": 12064, "promising</w>": 14183, "promo": 3037, "promo</w>": 6755, "promos</w>": 35044, "promote": 47384, "promote</w>": 8003, "promoted</w>": 16395, "promoter</w>": 33081, "promotes</w>": 20169, "promoting</w>": 9695, "promotion</w>": 9259, "promotional</w>": 17619, "promotions</w>": 19142, "promp": 11671, "prompt</w>": 20198, "prompted</w>": 45746, "prompts</w>": 33490, "proms</w>": 37759, "pron": 13285, "prone</w>": 30964, "pronoun": 23022, "pronounce</w>": 40489, "pronounced</w>": 34109, "pronto</w>": 44296, "proof": 17020, "proof</w>": 5248, "proofing</w>": 35679, "proofs</w>": 41023, "prop": 19123, "prop</w>": 16254, "propag": 12151, "propaganda</w>": 14718, "propane</w>": 45546, "propel</w>": 48439, "propeller</w>": 47404, "proper": 3577, "proper</w>": 8205, "properly</w>": 12560, "properties</w>": 10922, "property": 26486, "property</w>": 5043, "prophe": 9662, "prophecy</w>": 32501, "prophet</w>": 15549, "prophetic</w>": 47476, "prophets</w>": 39441, "propor": 35016, "proportion</w>": 
35775, "proportions</w>": 39391, "propos": 9455, "proposal</w>": 12139, "proposals</w>": 20568, "propose</w>": 28471, "proposed</w>": 10615, "proposes</w>": 27133, "proposing</w>": 42631, "proposition</w>": 44780, "propri": 28243, "props</w>": 15249, "propulsion</w>": 49380, "pros": 33925, "pros</w>": 14147, "prosciutto</w>": 46565, "prose": 47063, "prose</w>": 28675, "prosecco</w>": 28839, "prosecu": 12136, "prosecution</w>": 30902, "prosecutor</w>": 23736, "prosecutors</w>": 31656, "prosp": 24242, "prospec": 12693, "prospect</w>": 11211, "prospective</w>": 28034, "prospects</w>": 15372, "prosper": 16121, "prosper</w>": 33526, "prosperity</w>": 17203, "prosperous</w>": 28252, "prost</w>": 47923, "prostate</w>": 28808, "prostatec": 49064, "prosthetic</w>": 44602, "prostitu": 37333, "protag": 28950, "protagonist</w>": 38183, "prote": 1845, "protec": 5640, "protect": 25563, "protect</w>": 4817, "protected</w>": 12266, "protecting</w>": 11710, "protection</w>": 6238, "protections</w>": 33772, "protective</w>": 17028, "protector</w>": 20441, "protectors</w>": 45039, "protects</w>": 21889, "protein</w>": 8088, "proteins</w>": 28661, "protest</w>": 6279, "protestant</w>": 46945, "protested</w>": 48089, "protester</w>": 42073, "protesters</w>": 12660, "protesting</w>": 18788, "protestors</w>": 27822, "protests</w>": 12450, "proto": 8672, "proto</w>": 44958, "protocol</w>": 19938, "protocols</w>": 39631, "proton</w>": 40009, "prototype</w>": 16675, "prototyping</w>": 42081, "prou": 5739, "proud": 11080, "proud</w>": 1679, "prouder</w>": 39585, "proudest</w>": 46806, "proudly</w>": 11203, "proudof": 48184, "proudtobe": 35043, "prov": 23772, "prov</w>": 35021, "prove</w>": 10107, "proved</w>": 16473, "proven": 35405, "proven</w>": 14569, "provence</w>": 28067, "prover": 18312, "proverb</w>": 34419, "proverbs</w>": 27016, "proves</w>": 16119, "provi": 2289, "provide</w>": 4832, "provided</w>": 9046, "providence</w>": 19331, "provider</w>": 14409, "providers</w>": 17120, "provides</w>": 7161, "providing</w>": 7250, "provin": 12074, "province</w>": 8978, "provinces</w>": 35050, "provincial</w>": 16002, "proving</w>": 18055, "provision</w>": 30148, "provisional</w>": 36008, "provisions</w>": 39269, "provo": 15367, "provoc": 31618, "provocative</w>": 43809, "provoking</w>": 25510, "provost</w>": 36627, "prow": 38737, "prowrestling</w>": 39825, "prox": 41616, "proxim": 31436, "proximity</w>": 38298, "proxy</w>": 31680, "prs</w>": 23879, "pru": 12961, "pruitt</w>": 39453, "prun": 29029, "pruning</w>": 48133, "pry": 31965, "pryor</w>": 43375, "ps": 3982, "ps</w>": 814, "psa</w>": 14031, "psal": 13859, "psalm</w>": 17995, "psalms</w>": 35003, "psb</w>": 37017, "psc</w>": 43118, "psd</w>": 28810, "pse": 19737, "pse</w>": 5423, "pseu": 24919, "pseudo</w>": 46618, "psg</w>": 17123, "psi": 45848, "psi</w>": 24533, "psic": 29299, "psis</w>": 33041, "psl</w>": 21373, "psn</w>": 36781, "pso": 27045, "pson</w>": 7487, "psori": 44688, "psp</w>": 32769, "pss": 35718, "pss</w>": 42535, "psst</w>": 47814, "pst</w>": 12692, "psu": 41286, "psu</w>": 28338, "psv</w>": 44530, "psy": 3576, "psy</w>": 11056, "psych": 31041, "psych</w>": 20509, "psyched": 19932, "psyched</w>": 35199, "psychedelic</w>": 23292, "psychi": 18147, "psychiatric</w>": 30578, "psychiatry</w>": 39706, "psychic</w>": 24916, "psycho": 6472, "psycho</w>": 22154, "psychological</w>": 18153, "psychologist</w>": 32827, "psychology</w>": 12352, "psychop": 30112, "psychotic</w>": 48774, "pt": 11139, "pt</w>": 1459, "pta</w>": 11586, "ptbo</w>": 40481, 
"ptc</w>": 44646, "pte</w>": 47804, "pter": 49323, "pti": 29375, "pti</w>": 10491, "ptic</w>": 20670, "ption</w>": 3479, "ptions</w>": 24963, "pto": 31372, "pto</w>": 34092, "pton</w>": 19780, "pts</w>": 5886, "ptsd</w>": 23973, "ptv</w>": 42402, "pu": 755, "pu</w>": 11780, "pub": 20720, "pub</w>": 6301, "puberty</w>": 44122, "pubg</w>": 31496, "publ": 3434, "publi": 1617, "public": 3592, "public</w>": 2122, "publica</w>": 49007, "publication</w>": 13538, "publications</w>": 27334, "publichealth</w>": 35872, "publicity</w>": 20831, "publicly</w>": 18554, "publish</w>": 19032, "published</w>": 4311, "publisher</w>": 20455, "publishers</w>": 25222, "publishes</w>": 35633, "publishing</w>": 10994, "publix</w>": 47985, "pubs</w>": 21099, "puc": 48779, "puck</w>": 17550, "pud": 39234, "pudding</w>": 14025, "puddle</w>": 33545, "pue": 20161, "pueblo</w>": 33076, "puer": 8968, "puerto</w>": 12289, "puertor": 22757, "puertorico</w>": 26356, "puff": 44477, "puff</w>": 17184, "puffin</w>": 47632, "puffs</w>": 47453, "puffy</w>": 49245, "pug": 20950, "pug</w>": 17739, "pugchat</w>": 42266, "pugh</w>": 41302, "puglia</w>": 38345, "pugs</w>": 39425, "puj": 46163, "puja</w>": 33753, "puk</w>": 31811, "pul": 2469, "pul</w>": 40512, "pula": 45856, "puli</w>": 47293, "pulit": 27745, "pulitzer</w>": 31419, "pull": 20155, "pull</w>": 6857, "pulled</w>": 8525, "pulling</w>": 12897, "pullman</w>": 40203, "pullover</w>": 44020, "pulls</w>": 16041, "pulmon": 32613, "pulmonary</w>": 39132, "pulp</w>": 25410, "pulse": 40091, "pulse</w>": 12485, "pulses</w>": 42177, "pulsion</w>": 35398, "pum": 37497, "puma</w>": 20858, "pump": 5179, "pump</w>": 9173, "pumped</w>": 12796, "pumping</w>": 25150, "pumpkin": 36386, "pumpkin</w>": 8842, "pumpkins</w>": 23787, "pumps</w>": 18540, "pun": 2707, "pun</w>": 19929, "punc": 43907, "punch": 29332, "punch</w>": 10730, "punched</w>": 31689, "punches</w>": 35279, "punching</w>": 33468, "punctu": 31565, "punctuation</w>": 47051, "pundit</w>": 41466, "pune": 32593, "pune</w>": 14488, "pung</w>": 45420, "puni": 11479, "punish</w>": 34569, "punished</w>": 31598, "punisher</w>": 38509, "punishment</w>": 19099, "punjab": 19405, "punjab</w>": 12883, "punjabi</w>": 25430, "punk": 28933, "punk</w>": 7246, "punks</w>": 47171, "puns</w>": 35231, "punt</w>": 32699, "punta</w>": 34112, "punter</w>": 47092, "pup": 11926, "pup</w>": 11302, "pupil</w>": 27265, "pupils</w>": 13628, "pupp": 7116, "puppet</w>": 18439, "puppets</w>": 28475, "puppies</w>": 14820, "puppy": 25431, "puppy</w>": 6829, "puppylove</w>": 40849, "pups</w>": 20778, "pur": 1727, "pur</w>": 6265, "pura</w>": 25596, "puram</w>": 46174, "purcell</w>": 46065, "purch": 8384, "purchase</w>": 5481, "purchased</w>": 13399, "purchases</w>": 21887, "purchasing</w>": 20718, "purdu": 40691, "purdue</w>": 22280, "pure": 14202, "pure</w>": 5979, "puree</w>": 45474, "purely</w>": 32459, "puremichigan</w>": 39783, "purest</w>": 45497, "purge</w>": 33514, "puri": 16910, "puri</w>": 21974, "purification</w>": 47724, "purity</w>": 29780, "purple": 17837, "purple</w>": 5496, "purpose": 33492, "purpose</w>": 7391, "purposes</w>": 22020, "purr": 49262, "purr</w>": 46343, "purse</w>": 16480, "pursue</w>": 19463, "pursuing</w>": 26424, "pursuit</w>": 16469, "purée</w>": 40981, "pus</w>": 13841, "pusa</w>": 40825, "push": 16028, "push</w>": 6831, "pushaw": 35407, "pushaward": 35448, "pushawards": 47184, "pushed</w>": 16155, "pushes</w>": 23828, "pushing</w>": 11549, "put": 29535, "put</w>": 1983, "putin</w>": 10693, "putnam</w>": 40235, "puts</w>": 
7898, "putt</w>": 30279, "putter</w>": 44723, "putting</w>": 5154, "puzz": 19760, "puzzle</w>": 12875, "puzzles</w>": 27986, "pv": 14517, "pv</w>": 13495, "pvc</w>": 26959, "pvp</w>": 44172, "pvt</w>": 29898, "pw": 19419, "pw</w>": 16067, "pwc</w>": 22965, "px": 24790, "px</w>": 10262, "pxrtg</w>": 36262, "py": 4005, "py</w>": 7504, "pye": 31099, "pyeongchang</w>": 36066, "pyg": 41450, "pyram": 14405, "pyramid</w>": 18725, "pyramids</w>": 36877, "pyrene": 36740, "pyrenees</w>": 39744, "pyro": 39762, "python</w>": 13370, "pz</w>": 48361, "pé": 43167, "q": 80, "q</w>": 336, "qa": 24944, "qa</w>": 16360, "qad": 27844, "qadri</w>": 35672, "qaeda</w>": 31246, "qanda</w>": 48672, "qanon</w>": 19182, "qant": 35404, "qantas</w>": 43250, "qatar": 32804, "qatar</w>": 10872, "qb</w>": 8073, "qbs</w>": 38188, "qc</w>": 17406, "qe</w>": 30974, "qf</w>": 27215, "qi": 25054, "qi</w>": 11256, "qing": 46522, "qing</w>": 34339, "ql</w>": 28366, "qld": 23039, "qld</w>": 13765, "qldpol</w>": 42296, "qm</w>": 42148, "qotd</w>": 24504, "qpr</w>": 24788, "qq": 31960, "qr</w>": 18193, "qs</w>": 14364, "qt</w>": 15013, "qtr</w>": 44803, "qu": 666, "qu</w>": 28646, "qua</w>": 20363, "quack</w>": 45575, "quad": 11656, "quad</w>": 13419, "quadcopter</w>": 39792, "quadru": 35831, "quaid</w>": 34265, "quail</w>": 34392, "quaint</w>": 45976, "quake</w>": 8421, "quaker</w>": 43395, "quakes</w>": 24572, "qual": 9979, "qual</w>": 32405, "qualcomm</w>": 38683, "quali": 4574, "qualification</w>": 21508, "qualifications</w>": 35225, "qualified</w>": 11927, "qualifier</w>": 18733, "qualifiers</w>": 21388, "qualifies</w>": 35820, "qualify</w>": 17019, "qualifying</w>": 11895, "qualitative</w>": 45847, "qualities</w>": 20488, "quality": 28545, "quality</w>": 3027, "quan": 11669, "quan</w>": 27490, "quand</w>": 28198, "quant": 15050, "quanti": 31540, "quantitative</w>": 40583, "quantities</w>": 33917, "quantity</w>": 26920, "quantum</w>": 15320, "quar": 3856, "quare</w>": 42549, "quarry</w>": 27601, "quart": 7851, "quarter": 8816, "quarter</w>": 6632, "quarterback</w>": 16545, "quarterfinal</w>": 37992, "quarterfinals</w>": 28971, "quarterly</w>": 23350, "quarters</w>": 10146, "quartet</w>": 18056, "quartz</w>": 17752, "quat": 25715, "quattro</w>": 40300, "quay": 40276, "quay</w>": 17304, "que": 1147, "que</w>": 2319, "quebec</w>": 15373, "queen": 6407, "queen</w>": 2997, "queenof": 44398, "queens": 22943, "queens</w>": 9330, "queensland</w>": 15168, "queer": 38874, "queer</w>": 18161, "quel": 39774, "quel</w>": 21879, "quen": 23876, "quen</w>": 38324, "quent": 23808, "quentin</w>": 27530, "quer": 17378, "quer</w>": 26859, "quered</w>": 23210, "queries</w>": 32958, "querque</w>": 30338, "query</w>": 27464, "ques": 25328, "ques</w>": 7715, "queso</w>": 40110, "quest": 31653, "quest</w>": 4846, "questi": 2391, "question": 18961, "question</w>": 4382, "questionable</w>": 30733, "questioned</w>": 31847, "questioning</w>": 24887, "questions</w>": 3883, "quests</w>": 44611, "quet</w>": 8513, "quets</w>": 39055, "quetta</w>": 38326, "quette</w>": 18993, "queu": 32705, "queue</w>": 18549, "queues</w>": 40649, "queuing</w>": 44082, "quez</w>": 18677, "quezon</w>": 41117, "qui": 1912, "qui</w>": 18046, "quic": 26474, "quiche</w>": 47723, "quick": 5969, "quick</w>": 3712, "quicker</w>": 29211, "quickest</w>": 37734, "quickly</w>": 7787, "quid</w>": 30732, "quie": 43875, "quien</w>": 43482, "quiere</w>": 42723, "quiero</w>": 32567, "quiet": 17853, "quiet</w>": 7557, "quietly</w>": 22208, "quig": 44690, "quil": 12305, "quill</w>": 48951, 
"quilt</w>": 23977, "quilted</w>": 46052, "quin": 8607, "quin</w>": 17167, "quincy</w>": 27640, "quind": 32339, "quinn</w>": 12306, "quinoa</w>": 26703, "quins</w>": 39701, "quint": 26898, "quinta</w>": 47446, "quinte": 22098, "quintess": 37538, "quintet</w>": 35125, "quipment</w>": 42813, "quir": 15943, "quirky</w>": 25044, "quis": 15064, "quist</w>": 25128, "quit": 19358, "quit</w>": 11140, "quite</w>": 4135, "quito</w>": 35828, "quits</w>": 32505, "quitting</w>": 33871, "quity</w>": 33133, "quiz": 31197, "quiz</w>": 8344, "quizz": 35041, "quo": 3046, "quo</w>": 28127, "quoi</w>": 45549, "quot": 5452, "quot</w>": 47587, "quota</w>": 42097, "quotation</w>": 49195, "quote": 15446, "quote</w>": 4020, "quoted</w>": 27706, "quoteoftheday</w>": 19975, "quotes</w>": 5808, "quoting</w>": 31651, "qur</w>": 37782, "quran</w>": 19690, "qureshi</w>": 46307, "qvist</w>": 42322, "qx</w>": 45038, "r": 81, "r</w>": 337, "ra": 559, "ra</w>": 1735, "raa</w>": 44344, "rab": 14816, "rab</w>": 33224, "rabb": 6875, "rabbi</w>": 20959, "rabbit</w>": 10274, "rabbits</w>": 27028, "rabhu</w>": 25806, "rable</w>": 10182, "rac": 1773, "rac</w>": 30462, "raccoon</w>": 29516, "race": 10978, "race</w>": 2471, "racec": 18814, "racecourse</w>": 25036, "raced</w>": 36021, "racer</w>": 16798, "racers</w>": 33603, "races</w>": 8605, "raceway</w>": 24650, "rach": 6876, "rach</w>": 33429, "racha": 21952, "racha</w>": 35022, "rachael</w>": 29095, "rachel": 13511, "rachel</w>": 8029, "raci": 33381, "racial</w>": 13801, "racially</w>": 43577, "racing": 23306, "racing</w>": 3699, "racism</w>": 11276, "racist</w>": 9684, "racists</w>": 41777, "rack": 24600, "rack</w>": 12034, "racket</w>": 37691, "racks</w>": 21191, "rad": 4473, "rad</w>": 8238, "rada</w>": 30437, "radar</w>": 9672, "radcliffe</w>": 33096, "rade": 44494, "rade</w>": 17911, "rader</w>": 45002, "radford</w>": 45800, "radha</w>": 43122, "radi": 5772, "radial</w>": 42028, "radiance</w>": 45670, "radiant</w>": 25614, "radiation</w>": 18210, "radiator</w>": 39372, "radic": 18082, "radical</w>": 13712, "radicals</w>": 45903, "radio": 7176, "radio</w>": 2638, "radioactive</w>": 34704, "radiodisney</w>": 36483, "radiohead</w>": 39472, "radiology</w>": 29684, "radios</w>": 43669, "radish</w>": 37789, "radius</w>": 37570, "rado</w>": 29784, "rae": 21646, "rae</w>": 15051, "rael</w>": 45390, "raer</w>": 44561, "raf": 11495, "raf</w>": 11490, "rafa": 14352, "rafa</w>": 24850, "rafael": 38221, "rafael</w>": 19216, "rafaelnadal</w>": 49219, "raff": 34900, "raffic</w>": 32928, "raffle</w>": 13752, "raffles</w>": 43489, "rafi": 35304, "raft</w>": 9233, "rafting</w>": 36309, "rag": 13958, "rag</w>": 20687, "rage</w>": 8593, "rages</w>": 34253, "ragh": 35642, "ragha": 40972, "raging</w>": 25015, "ragn": 24125, "ragnar": 34385, "ragnarok</w>": 41856, "ragon</w>": 34768, "rags</w>": 47838, "rah": 12277, "rah</w>": 8766, "raheem</w>": 43317, "rahim</w>": 24152, "rahman</w>": 19680, "rahu": 13129, "rahul": 37239, "rahul</w>": 17440, "rahulg": 27510, "rahulgandhi</w>": 28293, "rai": 9165, "rai</w>": 9638, "raid</w>": 6877, "raided</w>": 43417, "raider": 27368, "raider</w>": 21455, "raidernation</w>": 47901, "raiders</w>": 11817, "raids</w>": 26655, "rail": 4573, "rail</w>": 6879, "raila</w>": 47273, "railminindia</w>": 35557, "railroad</w>": 17080, "rails</w>": 23427, "railway": 27614, "railway</w>": 7856, "railwayana</w>": 46750, "railways</w>": 20765, "raim": 45785, "rain": 3128, "rain</w>": 2443, "raina</w>": 30564, "rainbow": 24562, "rainbow</w>": 6286, "rainbows</w>": 30483, 
"raine": 49038, "raine</w>": 6871, "rained</w>": 32310, "rainf": 15024, "rainfall</w>": 15350, "rainforest</w>": 22823, "rainier</w>": 37850, "raining</w>": 13964, "rains</w>": 14272, "rainy</w>": 10222, "rais": 14729, "raise": 24249, "raise</w>": 5078, "raised</w>": 6027, "raiser</w>": 33555, "raises</w>": 13297, "raisethe": 47109, "raisin</w>": 36864, "raising</w>": 6883, "raj": 5958, "raj</w>": 10813, "raja": 46069, "raja</w>": 19150, "rajan</w>": 46595, "rajas": 16185, "rajasthan</w>": 18017, "raje": 21899, "rajesh</w>": 43602, "raji": 27569, "rajini": 29600, "rajini</w>": 40622, "rajinikanth</w>": 32922, "rajiv</w>": 40197, "rajkumar</w>": 49304, "rajput</w>": 47572, "raju</w>": 47029, "rak": 13523, "rak</w>": 26287, "rake": 26825, "rake</w>": 32712, "rakesh</w>": 41083, "ral": 8062, "ral</w>": 1406, "rale": 14192, "raleigh</w>": 18207, "rall": 23249, "rallies</w>": 25230, "rally": 18882, "rally</w>": 5041, "rallying</w>": 36836, "ralph": 25290, "ralph</w>": 12234, "ram": 1976, "ram</w>": 2007, "rama</w>": 22112, "ramad": 12736, "ramadan</w>": 15547, "ramadhan</w>": 47415, "raman</w>": 39816, "ramapho": 43963, "ramaphosa</w>": 44993, "ramatta</w>": 49112, "rambo</w>": 41855, "ramcharan</w>": 45275, "rame</w>": 47745, "ramen</w>": 18892, "ramesh": 48640, "ramesh</w>": 40186, "rami</w>": 43016, "ramirez</w>": 23877, "ramon</w>": 27958, "ramone</w>": 47201, "ramos</w>": 21046, "ramp</w>": 14271, "rampage</w>": 32077, "rampant</w>": 41985, "ramps</w>": 35257, "rams</w>": 10292, "ramsay</w>": 26259, "ramsey</w>": 19215, "ran": 1433, "ran</w>": 4031, "rana</w>": 22143, "ranbir</w>": 40881, "rance</w>": 29034, "ranch": 43955, "ranch</w>": 10659, "rancho</w>": 26258, "rand": 5628, "rand</w>": 18718, "randall</w>": 23639, "rande</w>": 21469, "randolph</w>": 29899, "random": 11396, "random</w>": 6160, "randomly</w>": 17272, "rands</w>": 39153, "randy": 29479, "randy</w>": 13279, "rane</w>": 28852, "rang": 4043, "rang</w>": 24377, "range": 13627, "range</w>": 3818, "ranger": 31472, "ranger</w>": 13593, "rangers</w>": 7664, "ranges</w>": 25685, "ranging</w>": 25946, "rani": 29264, "rani</w>": 22631, "rank</w>": 11501, "ranked</w>": 8307, "rankin</w>": 37539, "ranking</w>": 12347, "rankings</w>": 12596, "ranks</w>": 14469, "rano</w>": 18608, "rans</w>": 46259, "ransom": 28523, "ransom</w>": 34646, "ransomware</w>": 33815, "rant": 46467, "rant</w>": 9819, "rants</w>": 34014, "ranveer": 32402, "ranveer</w>": 41482, "ranveerofficial</w>": 42116, "rao</w>": 16913, "rap": 7773, "rap</w>": 7348, "rape": 46099, "rape</w>": 10070, "raped</w>": 23700, "rapha": 22754, "raphael</w>": 30091, "rapi": 8610, "rapid": 47697, "rapid</w>": 12205, "rapidly</w>": 16710, "rapids</w>": 18848, "raping</w>": 44926, "rapist</w>": 33360, "rapp": 19283, "rapper</w>": 11860, "rappers</w>": 30315, "rapping</w>": 42864, "raps</w>": 37887, "raptor</w>": 26762, "raptors</w>": 17035, "raq": 39787, "raq</w>": 43312, "raqqa</w>": 47074, "raquel</w>": 44338, "rar": 26819, "rar</w>": 24605, "rard</w>": 21012, "rare": 18992, "rare</w>": 3865, "rarely</w>": 17315, "rarest</w>": 43237, "rarity</w>": 45862, "ras": 23492, "ras</w>": 8224, "rasc": 30085, "rascal</w>": 43481, "rash": 14917, "rash</w>": 30608, "rashad</w>": 46527, "rasheed</w>": 41638, "rashi": 19426, "rashid</w>": 26757, "rasp": 10487, "raspberries</w>": 37742, "raspberry": 40162, "raspberry</w>": 13615, "raspberrypi</w>": 43934, "rass</w>": 45654, "rasta</w>": 47002, "rat": 3806, "rat</w>": 8985, "rata</w>": 28568, "ratchet</w>": 25078, "rate</w>": 5068, "rated</w>": 8183, 
"rates</w>": 6864, "rath": 18268, "rath</w>": 39772, "rather</w>": 5252, "rati": 11486, "rating</w>": 10567, "ratings</w>": 14176, "ratio</w>": 15893, "ration": 27002, "ration</w>": 35662, "rational</w>": 33086, "ratna</w>": 49078, "ratri</w>": 32288, "rats</w>": 19043, "ratt": 20737, "ratt</w>": 34785, "rattle": 40824, "rattle</w>": 41839, "rau": 27744, "raul</w>": 30218, "raun": 41169, "rav": 14367, "rav</w>": 23606, "rave": 38784, "rave</w>": 17601, "ravel</w>": 27927, "raven": 10269, "raven</w>": 16803, "ravens</w>": 17946, "ravi": 22947, "ravi</w>": 19538, "ravin": 39099, "raving</w>": 45807, "raviol": 41104, "ravioli</w>": 43460, "raw": 10166, "raw</w>": 6323, "rawlings</w>": 40662, "rax</w>": 38520, "ray": 5312, "ray</w>": 3077, "raya</w>": 29991, "raymond</w>": 16683, "rayn": 47852, "rayon</w>": 47900, "rays</w>": 11064, "raz": 9700, "raz</w>": 19087, "raza</w>": 37724, "razer</w>": 33832, "razor": 24934, "razor</w>": 21300, "razz": 43769, "rb": 12740, "rb</w>": 7477, "rbc</w>": 37500, "rbi</w>": 15687, "rbs</w>": 29102, "rc": 7575, "rc</w>": 7457, "rca</w>": 33942, "rcb</w>": 45240, "rcmp</w>": 31489, "rcn</w>": 49370, "rctid</w>": 49223, "rd": 13501, "rd</w>": 1973, "rda</w>": 45755, "rdr</w>": 44364, "rds</w>": 32378, "re": 515, "re</w>": 810, "rea</w>": 11521, "reach": 4483, "reach</w>": 4279, "reached</w>": 6878, "reaches</w>": 14462, "reaching</w>": 11358, "react": 36566, "react</w>": 15065, "reacted</w>": 42515, "reacting</w>": 40595, "reaction</w>": 7189, "reactions</w>": 18438, "reactive</w>": 42072, "reactjs</w>": 46173, "reactor</w>": 32037, "reacts</w>": 23115, "read": 933, "read</w>": 1199, "reader</w>": 9884, "readers</w>": 10335, "readiness</w>": 28131, "reading": 17556, "reading</w>": 2337, "readingfc</w>": 47428, "readings</w>": 23361, "reads</w>": 6597, "ready": 17351, "ready</w>": 1112, "reagan</w>": 17767, "real": 2017, "real</w>": 1532, "realdonaldtrump</w>": 7025, "reale": 5930, "realest</w>": 45855, "realestate": 32937, "realestate</w>": 6569, "reali": 4185, "realis</w>": 38114, "realise</w>": 14773, "realised</w>": 17945, "realising</w>": 39537, "realism</w>": 20024, "realist</w>": 30248, "realistic</w>": 16157, "realities</w>": 32443, "reality": 46802, "reality</w>": 5004, "realization</w>": 40402, "realize</w>": 7538, "realized</w>": 10489, "realizes</w>": 42918, "realizing</w>": 23284, "reall": 39686, "really": 43249, "really</w>": 1414, "realm</w>": 23083, "realmadrid</w>": 27866, "realms</w>": 43033, "realness</w>": 46761, "realtime": 44002, "realtime</w>": 38203, "realtor</w>": 18038, "realtors</w>": 31759, "realty</w>": 20471, "ream": 37242, "ream</w>": 15219, "rean</w>": 48477, "reap</w>": 31334, "reaper</w>": 29922, "rear": 39652, "rear</w>": 10223, "reas": 9121, "reason": 12882, "reason</w>": 3893, "reasonable</w>": 18558, "reasonably</w>": 38589, "reasoning</w>": 30341, "reasons</w>": 5686, "reau</w>": 32398, "reb": 12370, "reb</w>": 18796, "reba</w>": 48543, "rebate</w>": 43817, "rebe": 25227, "rebec": 10774, "rebecca</w>": 12892, "rebel": 8185, "rebel</w>": 12248, "rebellion</w>": 22170, "rebels</w>": 13623, "rebirth</w>": 33303, "reboot</w>": 22385, "reborn</w>": 30229, "reboun": 43381, "rebound</w>": 31280, "rebounds</w>": 19190, "rebs</w>": 28164, "rebu": 43162, "rebuild</w>": 20022, "rebuilding</w>": 30880, "rebuilt</w>": 33137, "rec": 1020, "rec</w>": 11243, "recall</w>": 15151, "recalled</w>": 32142, "recalling</w>": 47855, "recalls</w>": 24740, "recap": 29816, "recap</w>": 8337, "recaps</w>": 47997, "recard</w>": 35536, "rece": 1890, 
"recei": 2148, "receip": 38503, "receipt</w>": 30479, "receipts</w>": 41181, "receive</w>": 4800, "received</w>": 4178, "receiver</w>": 17659, "receivers</w>": 45294, "receives</w>": 10027, "receiving</w>": 7252, "recent</w>": 3969, "recently</w>": 4482, "recep": 17450, "reception</w>": 8364, "receptions</w>": 46881, "receptor</w>": 41835, "recess</w>": 38182, "recession</w>": 27176, "recharge</w>": 29396, "rechargeable</w>": 37516, "reci": 2037, "recipe": 28923, "recipe</w>": 4614, "recipeoftheday</w>": 38727, "recipes</w>": 9243, "recipi": 10136, "recipient</w>": 13703, "recipients</w>": 18940, "recipro": 41789, "recital</w>": 23457, "recite</w>": 48824, "reck": 11715, "reckless</w>": 26284, "reckon</w>": 23854, "recl": 42277, "reclaim</w>": 35969, "reclaimed</w>": 32648, "reco": 2535, "reco</w>": 46038, "recogn": 6343, "recogni": 5329, "recognise</w>": 19824, "recognised</w>": 20986, "recognising</w>": 48423, "recognition</w>": 9415, "recognizable</w>": 47240, "recognize</w>": 10905, "recognized</w>": 9929, "recognizes</w>": 26909, "recognizing</w>": 19666, "recomm": 4540, "recommend": 11628, "recommend</w>": 8942, "recommendation</w>": 20118, "recommendations</w>": 16516, "recommended</w>": 11100, "recommending</w>": 44301, "recommends</w>": 22940, "recon": 15371, "recon</w>": 28996, "reconciliation</w>": 26451, "reconstruction</w>": 24955, "recor": 1723, "record": 21328, "record</w>": 2717, "recorded</w>": 9392, "recorder</w>": 26747, "recording": 48237, "recording</w>": 6942, "recordings</w>": 19715, "records</w>": 4529, "recover</w>": 16785, "recovered</w>": 16444, "recovering</w>": 19005, "recovers</w>": 47935, "recovery</w>": 6591, "recre": 22148, "recreate</w>": 29775, "recreated</w>": 40888, "recreating</w>": 48224, "recreation</w>": 17331, "recreational</w>": 24329, "recru": 4745, "recruit": 9011, "recruit</w>": 15585, "recruited</w>": 36518, "recruiter</w>": 43120, "recruiters</w>": 46542, "recruiting</w>": 10533, "recruitment</w>": 10541, "recruits</w>": 22647, "recs</w>": 33069, "rectan": 43041, "rectangular</w>": 43321, "rector</w>": 41585, "recu": 26798, "recur": 19983, "recurring</w>": 35912, "recy": 6790, "recycla": 40659, "recyclable</w>": 48907, "recycle</w>": 19366, "recycled</w>": 16829, "recycling</w>": 12566, "red": 1893, "red</w>": 736, "redbubble</w>": 46137, "redbull": 29483, "redbull</w>": 29219, "redcarpet</w>": 32259, "redcross</w>": 30659, "redd": 22149, "redd</w>": 40618, "redding</w>": 41061, "reddish</w>": 43383, "reddit</w>": 15226, "reddy</w>": 23028, "rede": 10913, "redeem</w>": 37449, "redefining</w>": 46352, "redemption</w>": 20233, "redesign</w>": 24188, "redesigned</w>": 33111, "redevelopment</w>": 30322, "redhead</w>": 36267, "redi": 7976, "redman</w>": 44753, "redmond</w>": 39627, "rednation": 28180, "rednationrising</w>": 28262, "redneck</w>": 39105, "redness</w>": 22626, "redo</w>": 42524, "redon": 48506, "redro": 37722, "reds</w>": 11221, "redskins</w>": 19023, "redsox</w>": 19144, "reduc": 5015, "reduce</w>": 6604, "reduced</w>": 10821, "reduces</w>": 20539, "reducing</w>": 13836, "reduction</w>": 12219, "reductions</w>": 48263, "redux</w>": 43014, "redvelvet</w>": 41845, "redwings</w>": 31058, "redwood</w>": 31748, "ree": 9282, "ree</w>": 5813, "reebok</w>": 26734, "reece</w>": 30457, "reed": 26209, "reed</w>": 10435, "reedus</w>": 32865, "reef": 46557, "reef</w>": 15624, "reefs</w>": 34459, "reel": 34467, "reel</w>": 17166, "reels</w>": 48127, "reem</w>": 48891, "reen": 21638, "reen</w>": 23679, "rees</w>": 18314, "reese</w>": 20929, 
"reeves</w>": 23060, "ref": 4067, "ref</w>": 9591, "refe": 5624, "refer": 18425, "refer</w>": 22325, "referee</w>": 20398, "referees</w>": 45583, "referen": 13535, "reference</w>": 10214, "references</w>": 24009, "referendum</w>": 16732, "referr": 47784, "referral</w>": 30219, "referred</w>": 22969, "referring</w>": 29797, "refers</w>": 30069, "refill</w>": 37859, "refin": 13455, "refined</w>": 26098, "refinery</w>": 31393, "refining</w>": 48406, "reflec": 4608, "reflect</w>": 13373, "reflected</w>": 28732, "reflecting</w>": 19700, "reflection</w>": 11884, "reflections</w>": 16647, "reflective</w>": 27008, "reflects</w>": 15821, "reflex": 45756, "reflex</w>": 36050, "reform": 45678, "reform</w>": 8875, "reformation</w>": 45119, "reformed</w>": 40880, "reforms</w>": 19274, "refr": 34850, "refre": 11995, "refresh": 17836, "refresh</w>": 23288, "refreshed</w>": 35925, "refresher</w>": 41481, "refreshing</w>": 14159, "refreshments</w>": 31127, "refriger": 21076, "refrigerator</w>": 36662, "refs</w>": 35595, "refu": 3545, "refuge": 5638, "refuge</w>": 17432, "refugee</w>": 11556, "refugees": 42687, "refugees</w>": 8316, "refund</w>": 28899, "refur": 15519, "refurbi": 18259, "refurbished</w>": 26190, "refurbishment</w>": 35803, "refusal</w>": 46547, "refuse</w>": 16412, "refused</w>": 17190, "refuses</w>": 20085, "refusing</w>": 26704, "reg": 5472, "reg</w>": 12353, "regain</w>": 37510, "regal": 31512, "regal</w>": 25028, "regan</w>": 34062, "regar": 5881, "regard</w>": 21801, "regarded</w>": 32017, "regarding</w>": 8493, "regardless</w>": 17220, "regards</w>": 23079, "regatta</w>": 26316, "regen": 46545, "regency</w>": 29341, "regeneration</w>": 29257, "regent</w>": 30455, "regents</w>": 46710, "regg": 12757, "reggae": 37821, "reggae</w>": 15214, "reggie</w>": 21872, "regi": 1608, "regime</w>": 11378, "regiment</w>": 18603, "regin": 23287, "regina</w>": 16841, "region": 16542, "region</w>": 4341, "regional</w>": 5552, "regionals</w>": 26043, "regions</w>": 14530, "regis</w>": 28094, "register</w>": 3967, "registered</w>": 10254, "registering</w>": 33510, "registr": 29193, "registration</w>": 7302, "registrations</w>": 38423, "registry</w>": 30020, "rego</w>": 47351, "regram</w>": 30329, "regrann</w>": 48802, "regre": 8627, "regression</w>": 43733, "regret</w>": 14374, "regrets</w>": 23231, "regu": 3411, "regui</w>": 46722, "regul": 11847, "regular": 14882, "regular</w>": 6307, "regularly</w>": 17263, "regulat": 14575, "regulate</w>": 33494, "regulated</w>": 31384, "regulating</w>": 48156, "regulation</w>": 14267, "regulations</w>": 16654, "regulator</w>": 30364, "regulators</w>": 35837, "regulatory</w>": 17717, "reh": 21492, "reha": 10193, "rehab</w>": 16973, "rehabil": 17930, "rehabilitation</w>": 21042, "rehear": 7273, "rehearsal</w>": 11482, "rehearsals</w>": 17977, "rehearsing</w>": 23125, "rehman</w>": 39206, "rei": 15343, "rei</w>": 26033, "reic": 41230, "reich": 48589, "reich</w>": 28929, "reid": 45125, "reid</w>": 11744, "reig": 13092, "reign": 41419, "reign</w>": 14827, "reigning</w>": 28409, "reigns</w>": 21217, "reiki</w>": 46960, "reilly</w>": 28120, "reim": 35421, "reimagined</w>": 46799, "reimbur": 39857, "rein": 9240, "rein</w>": 45009, "reina</w>": 43847, "reinde": 23810, "reindeer</w>": 25072, "reinfor": 48161, "reinforced</w>": 41909, "reinst": 33969, "reinvent</w>": 38171, "reissue</w>": 34042, "reiter": 35394, "rejec": 9958, "reject</w>": 22435, "rejected</w>": 17505, "rejection</w>": 32264, "rejects</w>": 23155, "rejo": 20150, "rejoice</w>": 24712, "rejuven": 26332, "rek": 
47542, "rek</w>": 19201, "rel": 1825, "rel</w>": 5233, "rela": 4362, "reland</w>": 15220, "relat": 27192, "relatable</w>": 31010, "relate</w>": 17520, "related</w>": 5880, "relates</w>": 36064, "relating</w>": 27373, "relation": 4561, "relation</w>": 16207, "relations</w>": 10100, "relationship": 47239, "relationship</w>": 5837, "relationships</w>": 10610, "relative</w>": 17265, "relatively</w>": 18351, "relatives</w>": 21981, "relax": 6777, "relax</w>": 9035, "relaxation</w>": 22194, "relaxed</w>": 18999, "relaxing</w>": 10256, "relay</w>": 12403, "relays</w>": 28404, "rele": 1602, "release": 29100, "release</w>": 2706, "released</w>": 3410, "releases</w>": 7393, "releasethe": 44008, "releasing</w>": 10321, "releg": 23378, "relegated</w>": 45884, "relegation</w>": 35040, "relent": 22213, "relentless</w>": 27207, "relessly</w>": 33927, "relev": 9349, "relevance</w>": 31400, "relevant</w>": 10568, "reli": 2674, "reliability</w>": 27220, "reliable</w>": 13714, "reliance</w>": 27727, "relic</w>": 27802, "relics</w>": 43208, "relief</w>": 7518, "relies</w>": 41579, "relieve</w>": 28623, "relieved</w>": 36597, "religi": 4940, "religion</w>": 8803, "religions</w>": 31189, "religious</w>": 8289, "relish</w>": 35550, "relive</w>": 23939, "reliving</w>": 47558, "rell": 28802, "rell</w>": 7127, "rella</w>": 9952, "relle</w>": 31390, "reloaded</w>": 38908, "relocated</w>": 46791, "relocation</w>": 39198, "rels</w>": 23320, "relu": 32058, "reluct": 32549, "reluctant</w>": 45552, "rely</w>": 4158, "relying</w>": 42168, "rem": 15098, "rem</w>": 21637, "rema": 4569, "remain": 29144, "remain</w>": 6415, "remainder</w>": 41672, "remained</w>": 23714, "remaining</w>": 11392, "remains</w>": 6807, "remake</w>": 16234, "remark": 11136, "remarkable</w>": 12404, "remarkably</w>": 39087, "remarks</w>": 15001, "remastered</w>": 24932, "rematch</w>": 26473, "rembrandt</w>": 45972, "reme": 20071, "remedi": 18442, "remedies</w>": 25581, "remedy</w>": 25794, "remem": 7966, "rememb": 7062, "remember": 22045, "remember</w>": 2195, "remembered</w>": 11763, "remembering</w>": 8135, "remembers</w>": 12551, "remembrance": 40321, "remembrance</w>": 15860, "remembranceday</w>": 48333, "rement</w>": 7173, "rements</w>": 12667, "remi</w>": 41693, "remin": 3216, "remind</w>": 9868, "reminded</w>": 12309, "reminder</w>": 5565, "reminders</w>": 34121, "reminding</w>": 19976, "reminds</w>": 8303, "remington</w>": 43527, "reminis": 17723, "reminiscent</w>": 41704, "reminiscing</w>": 32552, "remix</w>": 8519, "remixes</w>": 31011, "remn": 29127, "remnants</w>": 39032, "remo": 4064, "remo</w>": 33259, "remodel": 34159, "remodel</w>": 37495, "remodeling</w>": 41432, "remote": 47163, "remote</w>": 9687, "remotely</w>": 32375, "removable</w>": 44095, "removal</w>": 13679, "remove</w>": 9709, "removed</w>": 10289, "remover</w>": 44267, "removes</w>": 29018, "removing</w>": 18504, "remy</w>": 30434, "ren": 737, "ren</w>": 2596, "rena</w>": 12591, "renais": 15409, "renaissance</w>": 16007, "renal</w>": 36096, "renamed</w>": 31535, "renault</w>": 17600, "rence": 19245, "rence</w>": 1553, "rences</w>": 8545, "rend": 33932, "rend</w>": 22851, "render": 39752, "render</w>": 13024, "rendered</w>": 23652, "rendering</w>": 21339, "renders</w>": 39419, "rendez": 43293, "rendezvous</w>": 45644, "rendition</w>": 28891, "rendon</w>": 46272, "rendous</w>": 49403, "rends</w>": 38842, "rene": 15438, "rene</w>": 12597, "renee</w>": 23480, "reneg": 29909, "renegade</w>": 41229, "renergy</w>": 37151, "renew": 6645, "renew</w>": 22015, "renewable": 31269, 
"renewable</w>": 15941, "renewableenergy</w>": 33357, "renewables</w>": 21619, "renewal</w>": 21270, "renewed</w>": 20524, "renfre": 45043, "reng</w>": 36795, "reno": 11520, "reno</w>": 12831, "renov": 9984, "renovated</w>": 23839, "renovation</w>": 17121, "renovations</w>": 31311, "renowned</w>": 14727, "rens</w>": 18183, "renshaw</w>": 44445, "rent": 17377, "rent</w>": 1609, "rental</w>": 12193, "rentals</w>": 24105, "rented</w>": 35932, "rential</w>": 31692, "renting</w>": 37662, "rently</w>": 2615, "rents</w>": 31109, "reo": 15963, "reo</w>": 26854, "reon</w>": 15761, "reopen</w>": 26883, "reopened</w>": 32868, "reopening</w>": 36663, "reopens</w>": 40644, "rep": 4229, "rep</w>": 6487, "repair</w>": 8419, "repaired</w>": 32953, "repairing</w>": 38534, "repairs</w>": 16297, "repar": 34065, "repe": 5785, "repeal": 42622, "repeal</w>": 23938, "repeat</w>": 10192, "repeated</w>": 27904, "repeatedly</w>": 26630, "repeating</w>": 33834, "repeats</w>": 39158, "repell": 46235, "repent": 47261, "reper": 29085, "repet": 38533, "repl": 13047, "replac": 6069, "replace</w>": 9466, "replaceable</w>": 47762, "replaced</w>": 13200, "replacement</w>": 10835, "replaces</w>": 27781, "replacing</w>": 18647, "replay</w>": 16875, "repleni": 44839, "replic": 21651, "replica</w>": 18125, "replied</w>": 24238, "replies</w>": 18808, "reply</w>": 8965, "replying</w>": 47599, "repor": 2628, "report</w>": 2417, "reported</w>": 7598, "reportedly</w>": 10953, "reporter</w>": 11019, "reporters</w>": 18454, "reporting</w>": 9218, "reports</w>": 4908, "reposit": 41276, "repository</w>": 46977, "repost": 33147, "repost</w>": 7217, "repostapp</w>": 38388, "reposting</w>": 20223, "reppin</w>": 19163, "repping</w>": 22574, "repre": 3397, "represent": 8293, "represent</w>": 8406, "representation</w>": 13520, "representative</w>": 13175, "representatives</w>": 15591, "represented</w>": 12299, "representing</w>": 7561, "represents</w>": 14433, "repri": 31854, "reproduction</w>": 35714, "reproductive</w>": 25522, "reps</w>": 14265, "reptile</w>": 36938, "reptiles</w>": 38679, "republic": 6376, "republic</w>": 7185, "republican</w>": 9842, "republicans</w>": 12384, "repur": 41852, "req</w>": 42411, "requ": 10664, "reque": 9539, "request</w>": 7813, "requested</w>": 16199, "requesting</w>": 33245, "requests</w>": 17087, "requi": 4863, "requiem</w>": 40316, "require</w>": 14437, "required</w>": 8500, "requirement</w>": 27146, "requirements</w>": 12860, "requires</w>": 13396, "requiring</w>": 33425, "requis": 42602, "rer": 41295, "rer</w>": 3407, "rera</w>": 14301, "rero</w>": 21860, "rers</w>": 18869, "res": 4466, "res</w>": 934, "resc": 3956, "rescheduled</w>": 43553, "rescu": 8618, "rescue": 28567, "rescue</w>": 5718, "rescued</w>": 11919, "rescues</w>": 32439, "rescuing</w>": 43770, "rese": 13000, "resear": 6090, "research": 25694, "research</w>": 2379, "researched</w>": 42733, "researcher</w>": 18334, "researchers</w>": 9522, "researching</w>": 24544, "reseller</w>": 35391, "resemb": 16916, "resemblance</w>": 26856, "resemble</w>": 37230, "resembles</w>": 35417, "reser": 16420, "reserv": 11906, "reservation</w>": 20289, "reservations</w>": 19307, "reserve</w>": 6911, "reserved</w>": 19796, "reserves</w>": 19705, "reservoir</w>": 20574, "reset</w>": 26250, "resh</w>": 47432, "reshi</w>": 39435, "resi": 2152, "residen": 22311, "residence</w>": 11672, "residences</w>": 38855, "residency</w>": 18545, "resident</w>": 9016, "residente": 44637, "residentevil</w>": 48393, "residential</w>": 11002, "residents</w>": 6008, "resign</w>": 
23584, "resignation</w>": 24779, "resigned</w>": 31014, "resigns</w>": 29738, "resil": 10932, "resili": 39212, "resilience</w>": 15271, "resilient</w>": 24694, "resin</w>": 24156, "resist": 37345, "resist</w>": 9587, "resistance</w>": 7392, "resistant</w>": 17542, "resisting</w>": 43679, "resolution</w>": 9977, "resolutions</w>": 26816, "resolve</w>": 20787, "resolved</w>": 28807, "reson": 18092, "resonance</w>": 42310, "resort</w>": 6594, "resorts</w>": 18839, "resource": 43729, "resource</w>": 9760, "resources</w>": 6723, "respec": 7466, "respect": 31411, "respect</w>": 4916, "respected</w>": 19126, "respectful</w>": 24379, "respecting</w>": 36172, "respective</w>": 25817, "respectively</w>": 28794, "respects</w>": 23553, "respir": 20771, "respiratory</w>": 24483, "respon": 2421, "respond</w>": 12355, "responded</w>": 21121, "respondents</w>": 49253, "responders</w>": 25155, "responding</w>": 18037, "responds</w>": 17436, "response</w>": 5399, "responses</w>": 19006, "responsi": 5490, "responsibilities</w>": 30375, "responsibility</w>": 11272, "responsible</w>": 8936, "responsibly</w>": 33675, "responsive</w>": 21544, "ress": 34651, "ress</w>": 13629, "resso</w>": 15133, "rest": 10974, "rest</w>": 2539, "restart</w>": 37378, "restaur": 3775, "restaurant": 41930, "restaurant</w>": 4489, "restaurants</w>": 11714, "rested</w>": 46020, "resting</w>": 18044, "restless</w>": 36724, "restling</w>": 30076, "resto": 11118, "resto</w>": 41666, "restock</w>": 34060, "restocked</w>": 36966, "restor": 8984, "restoration</w>": 11989, "restorative</w>": 46509, "restore</w>": 14008, "restored</w>": 14238, "restoring</w>": 24406, "restra": 25424, "restric": 11036, "restricted</w>": 27197, "restriction</w>": 44282, "restrictions</w>": 19884, "restroom</w>": 43423, "restructuring</w>": 43260, "rests</w>": 33775, "resu": 10095, "resul": 2655, "result</w>": 5659, "resulted</w>": 26449, "resulting</w>": 24581, "results</w>": 3790, "resume</w>": 15077, "resumes</w>": 30268, "resur": 14865, "resurg": 45962, "resurgence</w>": 47692, "resurrec": 18487, "resurrection</w>": 25811, "resusc": 47523, "ret": 20500, "ret</w>": 10048, "reta": 20153, "retail": 14910, "retail</w>": 6455, "retailer</w>": 22549, "retailers</w>": 19418, "retain</w>": 24430, "retained</w>": 42737, "retaining</w>": 35571, "retains</w>": 42583, "retali": 33101, "retar": 29964, "retarded</w>": 44111, "retention</w>": 26247, "rethink</w>": 29078, "rethinking</w>": 42951, "reti": 4721, "retin": 31270, "retina</w>": 36919, "retire</w>": 18846, "retired</w>": 11477, "retirement</w>": 9205, "retires</w>": 29060, "retiring</w>": 21200, "retrac": 32735, "retreat</w>": 11210, "retri": 16918, "retriever</w>": 28394, "retro": 6535, "retro</w>": 7755, "retrogamer</w>": 47220, "retrogaming</w>": 11316, "retrospective</w>": 27105, "rett": 41082, "rett</w>": 8425, "rette</w>": 33066, "return": 43042, "return</w>": 3458, "returned</w>": 10476, "returning</w>": 9290, "returns</w>": 5020, "retwee": 48190, "retweet</w>": 3195, "retweeted</w>": 12705, "retweeting</w>": 32345, "retweets</w>": 10160, "rety</w>": 41550, "reu": 20255, "reu</w>": 40371, "reuben</w>": 40450, "reunion</w>": 10247, "reunite</w>": 26179, "reunited</w>": 13516, "reusable</w>": 30395, "reuse</w>": 26535, "reut": 15210, "reuters</w>": 15569, "rev": 8424, "rev</w>": 11789, "revamp</w>": 29819, "revamped</w>": 36420, "revan</w>": 45277, "reve": 3115, "reveal</w>": 8052, "revealed</w>": 7171, "revealing</w>": 21321, "reveals</w>": 6621, "revel": 14133, "revelation</w>": 24053, "revelations</w>": 
36163, "reven": 10171, "revenge</w>": 12717, "revenue</w>": 10637, "revenues</w>": 33348, "rever": 14829, "rever</w>": 41913, "revere</w>": 44187, "reverend</w>": 34407, "revers": 20726, "reversal</w>": 33367, "reverse</w>": 12812, "reversed</w>": 42485, "reversi": 31601, "reversible</w>": 34212, "revi": 8317, "review</w>": 2268, "reviewed</w>": 16678, "reviewer</w>": 36409, "reviewers</w>": 48195, "reviewing</w>": 20458, "reviews</w>": 7227, "revise</w>": 46801, "revised</w>": 22806, "revising</w>": 46882, "revision</w>": 20335, "revisit</w>": 26568, "revisited</w>": 34302, "revisiting</w>": 33144, "revit": 26367, "revitalization</w>": 46923, "revival</w>": 14142, "revive</w>": 26450, "revived</w>": 42912, "revo": 28660, "revol": 13447, "revolt</w>": 31697, "revolu": 4900, "revolution": 17699, "revolution</w>": 6644, "revolutionary</w>": 14734, "revolver</w>": 38747, "revolving</w>": 47230, "revs</w>": 49286, "revue</w>": 43428, "rew</w>": 37564, "rewar": 15857, "reward</w>": 11223, "rewarded</w>": 27163, "rewarding</w>": 23351, "rewards</w>": 15235, "rewatch</w>": 35610, "rewatching</w>": 41287, "rewind</w>": 26867, "rewrite</w>": 45218, "rex": 13002, "rex</w>": 10904, "rexperience</w>": 33924, "rey": 9681, "rey</w>": 4517, "reyes</w>": 18255, "reykja": 47571, "reyn": 11998, "reynolds</w>": 14309, "reys</w>": 48284, "rez": 27597, "rez</w>": 15192, "reza</w>": 35888, "rf": 35529, "rf</w>": 16368, "rfc</w>": 19003, "rfid</w>": 40204, "rg": 33055, "rg</w>": 14897, "rgb</w>": 36128, "rgv</w>": 33685, "rh": 8745, "rh</w>": 22404, "rha": 19473, "rhapso": 32532, "rhapsody</w>": 35774, "rhe": 9186, "rhea</w>": 28612, "rhetor": 24359, "rhetoric</w>": 29985, "rhett</w>": 42984, "rheu": 42953, "rhi": 21212, "rhin": 12269, "rhine": 22863, "rhine</w>": 44833, "rhinestone</w>": 30450, "rhino": 41744, "rhino</w>": 20056, "rhinos</w>": 30671, "rho": 7637, "rhode": 39302, "rhode</w>": 27907, "rhodes</w>": 17785, "rhon": 25882, "rhonda</w>": 46100, "rhp</w>": 27199, "rhs</w>": 24551, "rhu": 23897, "rhubarb</w>": 30213, "rhy": 7740, "rhyme</w>": 37356, "rhymes</w>": 33143, "rhys</w>": 28647, "rhyth": 27069, "rhythm</w>": 16172, "rhythmic</w>": 46386, "rhythms</w>": 40872, "ri": 553, "ri</w>": 2574, "ria</w>": 3650, "rial</w>": 15200, "rian</w>": 7788, "rib": 44634, "rib</w>": 18298, "riba</w>": 44992, "ribb": 10081, "ribbon</w>": 12114, "ribbons</w>": 35271, "ribe": 46115, "ribs</w>": 17519, "ric": 920, "ric</w>": 4798, "rica</w>": 14230, "rical</w>": 18109, "rican</w>": 30958, "ricardo</w>": 23140, "ricci": 35783, "ricciardo</w>": 49282, "rice": 36362, "rice</w>": 4741, "rich": 5223, "rich</w>": 4021, "richar": 9350, "richard": 9080, "richard</w>": 4470, "richards</w>": 11372, "richardson</w>": 15984, "riche": 23286, "richer</w>": 34138, "riches</w>": 37093, "richest</w>": 25572, "richi": 38934, "richie</w>": 19797, "richland</w>": 43079, "richmond": 34143, "richmond</w>": 11292, "richter</w>": 37591, "rick": 6237, "rick</w>": 3064, "ricket": 46161, "ricket</w>": 23671, "ricks</w>": 23111, "ricky": 19188, "ricky</w>": 12814, "rico": 37962, "rico</w>": 11362, "ricotta</w>": 38473, "rics</w>": 7353, "ricul": 6980, "rid": 18103, "rid</w>": 9874, "ridd": 21990, "ridden</w>": 32025, "riddle</w>": 31839, "ride": 15816, "ride</w>": 2994, "rider": 31056, "rider</w>": 9707, "riders</w>": 10826, "rides</w>": 11308, "ridg": 42646, "ridge": 16580, "ridge</w>": 6352, "ridic": 9624, "ridiculous</w>": 12659, "ridiculously</w>": 25661, "ridin</w>": 47869, "riding</w>": 6765, "ridley</w>": 27883, "rie": 14824, "rie</w>": 
5322, "ried</w>": 7552, "riel</w>": 26696, "rien</w>": 35237, "rier": 40714, "rier</w>": 13336, "ries": 28179, "ries</w>": 3059, "riesling</w>": 36372, "rif": 7044, "riff</w>": 30359, "rifle</w>": 15354, "rifles</w>": 25678, "rift</w>": 26681, "rig": 18462, "rig</w>": 13871, "riga</w>": 36626, "rigged</w>": 35897, "rigging</w>": 38160, "riggs</w>": 40328, "righ": 15391, "right": 13341, "right</w>": 1155, "righte": 20762, "righteous</w>": 28169, "righteousness</w>": 42481, "rightful</w>": 42601, "rightly</w>": 42669, "rights</w>": 3336, "rigid</w>": 43138, "rigor": 36788, "rigorous</w>": 41654, "rigs</w>": 42893, "rihanna</w>": 13744, "rij": 41097, "rik": 31136, "rik</w>": 27832, "rika</w>": 28580, "ril": 12270, "ril</w>": 2388, "riley": 35056, "riley</w>": 12260, "rill</w>": 23705, "rilla": 43956, "rilla</w>": 18685, "rim": 28147, "rim</w>": 12199, "rime</w>": 27064, "rimin": 11527, "rimo</w>": 47817, "rims</w>": 34327, "rin": 5859, "rin</w>": 11739, "rina</w>": 12869, "rine</w>": 24952, "ring": 8318, "ring</w>": 2540, "ringed</w>": 44712, "ringer</w>": 35761, "ringing</w>": 26035, "ringo</w>": 38845, "rings</w>": 5751, "rington</w>": 12455, "rink</w>": 21497, "rinka</w>": 47316, "rino</w>": 47188, "rinse</w>": 48320, "rio": 15681, "rio</w>": 5782, "rion": 31623, "rion</w>": 34046, "rios</w>": 32814, "riot": 32636, "riot</w>": 14218, "riots</w>": 24844, "rious</w>": 6340, "rip": 10353, "rip</w>": 4243, "ripe</w>": 22832, "ripley</w>": 41589, "ripp": 25276, "ripped</w>": 17815, "ripper</w>": 35347, "ripping</w>": 29126, "ripple</w>": 24825, "rips</w>": 30182, "rir</w>": 36792, "ris": 6108, "ris</w>": 1999, "rise": 13641, "rise</w>": 3151, "risen</w>": 23653, "risers</w>": 44983, "rises</w>": 13362, "riseup</w>": 35760, "rish": 18378, "rish</w>": 18927, "rishi": 48434, "rising": 30452, "rising</w>": 5448, "risis</w>": 37998, "risk": 27967, "risk</w>": 4213, "risking</w>": 48155, "risks</w>": 12474, "risky</w>": 27630, "risotto</w>": 31471, "rist</w>": 40610, "rit": 5156, "rit</w>": 17333, "rita</w>": 16178, "ritchie</w>": 30997, "rite": 39318, "rite</w>": 18429, "rites</w>": 36160, "rith": 48169, "rith</w>": 48850, "riti": 32904, "rito</w>": 19379, "ritos</w>": 33507, "ritt</w>": 26092, "ritter</w>": 34854, "ritu": 13391, "ritual</w>": 19712, "rituals</w>": 31145, "ritz": 39151, "ritz</w>": 25627, "rium</w>": 33884, "riv": 25113, "rival": 13412, "rival</w>": 15629, "rivalry</w>": 19511, "rivals</w>": 15135, "rive": 27588, "rive</w>": 34917, "river": 5239, "river</w>": 2473, "rivera</w>": 18275, "riverdale</w>": 28304, "riverfront</w>": 44439, "rivers</w>": 10723, "riverside</w>": 15809, "riveting</w>": 44024, "riviera</w>": 25851, "rix": 43407, "rix</w>": 9483, "riya</w>": 36908, "riyad": 31564, "riyadh</w>": 33577, "riz": 18426, "riz</w>": 35411, "rizal</w>": 41555, "rizio</w>": 40191, "rizz": 34826, "rizzo</w>": 49076, "rj": 26016, "rj</w>": 20949, "rk": 38725, "rk</w>": 21422, "rl": 18041, "rl</w>": 14590, "rlly</w>": 43222, "rly</w>": 25954, "rm": 20202, "rm</w>": 8431, "rmb</w>": 49097, "rms</w>": 40529, "rn": 13206, "rn</w>": 7666, "rna</w>": 24566, "rnb</w>": 31556, "rnc</w>": 35309, "rnli</w>": 29748, "ro": 532, "ro</w>": 2795, "roa": 8313, "roach</w>": 31073, "road": 4370, "road</w>": 1759, "roadhouse</w>": 47891, "roadmap</w>": 30111, "roads</w>": 6189, "roadsafety</w>": 39992, "roadshow</w>": 21168, "roadside</w>": 26928, "roadster</w>": 28920, "roadto": 24681, "roadtrip</w>": 15094, "roadway</w>": 42744, "roam</w>": 34045, "roaming</w>": 29240, "roano": 34184, "roanoke</w>": 
36587, "roar": 34193, "roar</w>": 18483, "roaring</w>": 26428, "roast</w>": 11404, "roasted</w>": 10479, "roasting</w>": 32228, "rob": 2668, "rob</w>": 6442, "robb": 14059, "robb</w>": 39673, "robbed</w>": 24163, "robber</w>": 35545, "robbers</w>": 40852, "robbery</w>": 16393, "robbi": 44898, "robbie": 37200, "robbie</w>": 15970, "robbing</w>": 47569, "robbins</w>": 23461, "robby</w>": 44128, "robe</w>": 23116, "rober": 4532, "robert": 8811, "robert</w>": 3929, "roberta</w>": 43373, "roberto": 42645, "roberto</w>": 16227, "roberts</w>": 10366, "robertson</w>": 17643, "robes</w>": 29304, "robi</w>": 16743, "robin": 6681, "robin</w>": 7988, "robins</w>": 35502, "robinson</w>": 8523, "robles</w>": 47646, "roblo": 27481, "roblox</w>": 37798, "robo": 4672, "robo</w>": 36057, "robot": 46089, "robot</w>": 8797, "robotic</w>": 23975, "robotics</w>": 13546, "robots</w>": 13473, "robson</w>": 31113, "robust</w>": 22780, "robyn</w>": 34533, "roc": 3268, "roc</w>": 13776, "rocco</w>": 30009, "roch": 23788, "rochdale</w>": 41880, "roche</w>": 31776, "rochelle</w>": 40161, "rochester</w>": 18057, "rock": 2640, "rock</w>": 2172, "rockab": 39353, "rockabilly</w>": 45019, "rocke": 19914, "rocked</w>": 16116, "rockefeller</w>": 35476, "rocker</w>": 29008, "rockers</w>": 32338, "rocket": 25435, "rocket</w>": 8383, "rockets</w>": 13292, "rockford</w>": 41039, "rockies</w>": 20621, "rockin</w>": 12073, "rocking</w>": 7081, "rockn": 24442, "rocknroll</w>": 27840, "rocks</w>": 6135, "rockstar": 23603, "rockstar</w>": 18000, "rockstargames</w>": 27516, "rockstars</w>": 46639, "rockthe": 49363, "rockwell</w>": 34747, "rocky": 33481, "rocky</w>": 9648, "rod": 9712, "rod</w>": 8291, "roddy</w>": 42332, "rode</w>": 18449, "rodeo</w>": 18250, "rodgers</w>": 17612, "rodi": 49100, "rodney</w>": 21753, "rodri": 11053, "rodrigo</w>": 33944, "rodriguez</w>": 14057, "rods</w>": 28618, "roe": 27671, "roe</w>": 9996, "rof": 33029, "rofl</w>": 48228, "roft</w>": 45212, "rog": 34269, "rog</w>": 34017, "rogen</w>": 23380, "roger": 13929, "roger</w>": 7735, "rogerfederer</w>": 40182, "rogers</w>": 10661, "rogue": 32575, "rogue</w>": 15162, "roh": 14933, "roh</w>": 29840, "rohan</w>": 39848, "rohing": 23600, "rohingya</w>": 26146, "rohit": 44649, "rohit</w>": 24299, "roi</w>": 21877, "rok</w>": 36807, "rol": 3393, "rol</w>": 7818, "roland": 33713, "roland</w>": 19569, "role": 18485, "role</w>": 3414, "roles</w>": 11871, "rolex</w>": 21093, "rolf</w>": 48606, "roll": 4711, "roll</w>": 3341, "rolled</w>": 11982, "roller": 21034, "roller</w>": 12342, "rollercoaster</w>": 38248, "rollers</w>": 36941, "rollin</w>": 27545, "rolling": 24250, "rolling</w>": 6347, "rollingstones</w>": 41309, "rollins</w>": 27724, "rollout</w>": 47710, "rollover</w>": 39214, "rolls</w>": 8614, "rolltide</w>": 28101, "rom": 11377, "rom</w>": 19205, "roma": 44134, "roma</w>": 11631, "romain</w>": 48897, "roman": 4416, "roman</w>": 7370, "romance</w>": 7215, "romania</w>": 15884, "romanian</w>": 30866, "romano</w>": 38409, "romans</w>": 23066, "romantic": 41457, "romantic</w>": 8821, "rome": 9406, "rome</w>": 5243, "romeo</w>": 14429, "romero</w>": 23694, "romney</w>": 19287, "romo</w>": 32248, "romper</w>": 43699, "ron": 2393, "ron</w>": 3372, "rona</w>": 42385, "ronal": 46194, "ronald</w>": 15683, "ronaldo</w>": 13463, "ronan</w>": 34971, "rond": 31935, "ronda</w>": 37436, "rondo</w>": 43756, "rone": 48082, "rone</w>": 32763, "roni</w>": 47234, "ronnie": 45257, "ronnie</w>": 16421, "rons</w>": 19536, "ront</w>": 48881, "roo": 1249, "roo</w>": 31227, 
"rood</w>": 38007, "roof": 9120, "roof</w>": 6449, "roofing</w>": 24415, "roofs</w>": 34635, "rooftop</w>": 16319, "rook</w>": 35918, "rookie</w>": 9771, "rookies</w>": 31917, "room": 8845, "room</w>": 1530, "roomie</w>": 36851, "roommate</w>": 19825, "roommates</w>": 37323, "rooms</w>": 6328, "rooney</w>": 17712, "roos</w>": 32938, "roosevel": 17644, "roosevelt</w>": 18488, "rooster": 46263, "rooster</w>": 30926, "roosters</w>": 43693, "root": 25930, "root</w>": 9728, "rooted</w>": 30428, "rooting</w>": 25523, "roots</w>": 8084, "rop</w>": 43401, "rope</w>": 9953, "ropes</w>": 30506, "ror": 8668, "ror</w>": 2843, "rors</w>": 12072, "rory": 42804, "rory</w>": 17813, "ros": 5288, "ros</w>": 6930, "rosa</w>": 14393, "rosal": 30397, "rosario</w>": 33640, "rosary</w>": 33098, "rosberg</w>": 46037, "rose": 6146, "rose</w>": 3568, "roseanne</w>": 47528, "rosel": 33616, "rosemary</w>": 19472, "rosen": 13214, "rosen</w>": 36424, "rosenberg</w>": 43558, "rosenthal</w>": 46990, "roses</w>": 9061, "rosetta</w>": 43800, "rosewood</w>": 38686, "rosie": 43049, "rosie</w>": 16888, "ross": 8801, "ross</w>": 2158, "rosse</w>": 11602, "rossi</w>": 24817, "rosso</w>": 33023, "roster</w>": 12487, "roswell</w>": 45116, "rosy</w>": 46705, "rosé</w>": 28006, "rot": 10055, "rot</w>": 9643, "rotar": 45959, "rotary</w>": 14654, "rotating</w>": 32265, "rotation</w>": 18089, "rotc</w>": 32252, "roth": 17741, "roth</w>": 19139, "rother": 23174, "rotherham</w>": 37687, "rothschild</w>": 45089, "roti</w>": 46940, "roto": 34698, "rotor</w>": 42991, "rots</w>": 16642, "rott": 34806, "rotten</w>": 24324, "rotter": 22614, "rotterdam</w>": 23422, "rotun": 42970, "rou": 2964, "rou</w>": 34783, "roud</w>": 28375, "rouge</w>": 16209, "rough": 11699, "rough</w>": 8511, "roughly</w>": 21910, "roughs</w>": 37598, "rouhani</w>": 39912, "roulette</w>": 39930, "roun": 5602, "round": 9403, "round</w>": 2522, "roundabout</w>": 29953, "rounded</w>": 26973, "rounder</w>": 37024, "rounding</w>": 40208, "rounds</w>": 11242, "roundtable</w>": 19386, "roundup</w>": 17503, "roup</w>": 29220, "rourke</w>": 38753, "rous": 33645, "rous</w>": 34531, "rousey</w>": 46267, "rout": 7502, "rout</w>": 41778, "route</w>": 5261, "router</w>": 29962, "routes</w>": 14923, "routine</w>": 12319, "routines</w>": 44074, "routing</w>": 44086, "roux</w>": 43416, "rov</w>": 23971, "rove</w>": 30130, "rover</w>": 12776, "rovers</w>": 16373, "row": 5275, "row</w>": 1044, "rowan</w>": 26240, "rowdy</w>": 32141, "rowe</w>": 28323, "rowed</w>": 22615, "rower</w>": 43345, "rowers</w>": 41806, "rowing</w>": 12807, "rowland</w>": 33037, "rowley</w>": 48793, "rowling</w>": 29371, "rown": 22287, "rown</w>": 25060, "rows</w>": 9409, "rox": 14111, "rox</w>": 41033, "roxy</w>": 28093, "roy": 2128, "roy</w>": 6354, "royal": 6691, "royal</w>": 3853, "royale</w>": 20630, "royalnavy</w>": 41545, "royals</w>": 13335, "royalties</w>": 48660, "royalty</w>": 18296, "royalwedding</w>": 27461, "royce</w>": 18444, "royd</w>": 41476, "royo</w>": 39357, "roz": 28989, "roz</w>": 37250, "rp": 17305, "rp</w>": 8174, "rpa</w>": 41872, "rpg</w>": 12445, "rpm</w>": 23715, "rps</w>": 49215, "rr": 5311, "rr</w>": 9126, "rrp</w>": 36967, "rrr</w>": 18267, "rrrr": 25561, "rrrr</w>": 34444, "rs": 6978, "rs</w>": 1724, "rsa</w>": 29437, "rsc</w>": 48524, "rsd</w>": 34426, "rsi</w>": 39046, "rsl</w>": 44752, "rsp": 16381, "rspb": 38508, "rspb</w>": 36727, "rspca</w>": 45643, "rss": 46466, "rss</w>": 22350, "rstats</w>": 38700, "rsvp</w>": 9774, "rt": 8959, "rt</w>": 8991, "rtc</w>": 31648, "rte": 
33822, "rte</w>": 23322, "rtg</w>": 22028, "rti</w>": 47549, "rtr</w>": 43999, "rts</w>": 8496, "rtw</w>": 34673, "ru": 681, "ru</w>": 13735, "rub": 15862, "rub</w>": 22586, "rubb": 19597, "rubbed</w>": 45239, "rubber": 31131, "rubber</w>": 11331, "rubbing</w>": 41262, "rubbish</w>": 21108, "rubble</w>": 42230, "ruben": 44058, "ruben</w>": 29722, "rubi": 27856, "rubin</w>": 34128, "rubio</w>": 24244, "rubs</w>": 43422, "ruby": 24552, "ruby</w>": 11493, "ruck": 27449, "rucker</w>": 45402, "rud": 35256, "rudd</w>": 31836, "rude</w>": 16548, "rudi": 48360, "rudol": 40927, "rudolf</w>": 46835, "rudolph</w>": 30119, "rudy": 38226, "rudy</w>": 22131, "rue": 38024, "rue</w>": 19276, "rufc</w>": 45084, "ruff": 28177, "ruff</w>": 30304, "rufus</w>": 39322, "rug": 4217, "rug</w>": 19220, "rugby": 15091, "rugby</w>": 4964, "rugbyleague</w>": 44419, "ruger</w>": 48655, "rugged</w>": 25225, "rugs</w>": 29946, "rui</w>": 46974, "ruin</w>": 16256, "ruined</w>": 17231, "ruining</w>": 29952, "ruins</w>": 16094, "ruiz</w>": 27873, "ruk</w>": 46628, "rukh": 43075, "rukh</w>": 27631, "rule": 31643, "rule</w>": 6175, "ruled</w>": 16324, "ruler</w>": 26286, "rulers</w>": 45328, "rules</w>": 5272, "ruling</w>": 14690, "rum": 9223, "rum</w>": 11233, "rumb": 42432, "rumble</w>": 18900, "rumi</w>": 31428, "rumor</w>": 22254, "rumored</w>": 36694, "rumors</w>": 16160, "rumour</w>": 34296, "rumours</w>": 20716, "rump</w>": 29366, "run": 1639, "run</w>": 1934, "runaway</w>": 28851, "runchat</w>": 25838, "rundown</w>": 41100, "rune": 33882, "rune</w>": 49244, "runner": 37370, "runner</w>": 7913, "runners</w>": 10571, "runnin</w>": 43130, "running": 24451, "running</w>": 2761, "runoff</w>": 38564, "runs</w>": 5586, "runway</w>": 13927, "rup": 7996, "rup</w>": 14980, "rupaul</w>": 44211, "rupee</w>": 43916, "rupees</w>": 44110, "rupert</w>": 25625, "rupt</w>": 23055, "ruption</w>": 35403, "rural": 28801, "rural</w>": 8737, "rus": 35811, "rus</w>": 5998, "rush": 12148, "rush</w>": 6973, "rushed</w>": 28104, "rusher</w>": 48745, "rushes</w>": 47217, "rushing</w>": 20284, "russ": 6285, "russ</w>": 20764, "russell": 26122, "russell</w>": 8150, "russi": 2600, "russia</w>": 4018, "russian": 30731, "russian</w>": 4868, "russians</w>": 25413, "russo</w>": 30679, "rust": 28682, "rust</w>": 14212, "rustic</w>": 19822, "rusty": 43966, "rusty</w>": 22646, "rut": 14973, "rut</w>": 39102, "rutger": 49029, "rutgers</w>": 28934, "ruth": 15798, "ruth</w>": 12029, "ruther": 26676, "rutherford</w>": 31070, "ruthless</w>": 36063, "rutland</w>": 46024, "ruto</w>": 43702, "ruz</w>": 23275, "rv": 17135, "rv</w>": 17951, "rva</w>": 24278, "rw": 9085, "rw</w>": 22926, "rwa</w>": 47452, "rwand": 31758, "rwanda</w>": 15427, "rwby</w>": 39698, "rwc</w>": 32321, "rx": 41188, "rx</w>": 15945, "ry": 1511, "ry</w>": 913, "ryan": 8682, "ryan</w>": 4053, "ryanair</w>": 43526, "ryder": 43564, "ryder</w>": 21805, "rye": 24015, "rye</w>": 17409, "rying</w>": 7838, "ryn</w>": 37728, "ryo": 24460, "rys</w>": 21654, "ryu": 46656, "ryu</w>": 34604, "ré": 29106, "s": 82, "s</w>": 338, "sa": 774, "sa</w>": 1344, "saa</w>": 13429, "saab</w>": 27158, "saad</w>": 36530, "saas</w>": 25761, "saat": 33151, "sab": 3233, "sab</w>": 23213, "saba</w>": 38344, "sabah</w>": 32854, "saban</w>": 41620, "sabar": 47102, "sabbath</w>": 26008, "sabc</w>": 30010, "sabcnews</w>": 41093, "saber": 46822, "saber</w>": 25624, "sabha</w>": 23431, "sabi</w>": 47073, "sabine</w>": 44062, "sable</w>": 19224, "sabot": 30700, "sabotage</w>": 40496, "sabre</w>": 35110, "sabres</w>": 29620, 
"sabrin": 37029, "sabrina</w>": 24994, "sac": 3632, "sac</w>": 12905, "sach": 30168, "sacha</w>": 49010, "sachin": 47527, "sachin</w>": 30297, "sachs</w>": 31451, "sack": 28964, "sack</w>": 14979, "sacked</w>": 27519, "sacks</w>": 26441, "sacram": 13334, "sacramento</w>": 16065, "sacred": 40612, "sacred</w>": 12477, "sacri": 15283, "sacrif": 12117, "sacrific": 16919, "sacrifice</w>": 12556, "sacrificed</w>": 31116, "sacrifices</w>": 28858, "sacrificing</w>": 48146, "sad": 2810, "sad</w>": 3719, "saddened</w>": 27720, "saddest</w>": 34925, "saddle": 30469, "saddle</w>": 20283, "sade</w>": 27429, "sadh": 40955, "sadi": 22207, "sadie</w>": 30333, "sadiq</w>": 44107, "sadler</w>": 45600, "sadly</w>": 11603, "sadness</w>": 20399, "sae": 38633, "sae</w>": 34883, "saeed</w>": 29745, "saf": 2125, "saf</w>": 25760, "safar": 23443, "safari</w>": 14091, "safarilive</w>": 34816, "safc</w>": 27998, "safe": 2901, "safe</w>": 2996, "safeguard</w>": 42249, "safeguarding</w>": 47451, "safely</w>": 11513, "safer": 40124, "safer</w>": 15504, "safest</w>": 38973, "safety": 19050, "safety</w>": 3406, "safetyfirst</w>": 43608, "saffron</w>": 27529, "sag": 6609, "sag</w>": 30048, "saga</w>": 15758, "sagan</w>": 37193, "sagar</w>": 42518, "sage": 25800, "sage</w>": 7509, "sages</w>": 25979, "sagin": 47097, "sagitt": 44685, "sagu": 44708, "sah": 30943, "sah</w>": 26342, "saha": 36062, "sahara</w>": 24599, "saharan</w>": 44255, "sahi": 24608, "sahib</w>": 34150, "sai": 16048, "sai</w>": 10886, "said": 40319, "said</w>": 1946, "saif</w>": 44164, "saig": 36328, "saigon</w>": 41081, "sail": 7528, "sail</w>": 12156, "sailed</w>": 43047, "sailing</w>": 11003, "sailor": 28002, "sailor</w>": 16076, "sailormoon</w>": 40673, "sailors</w>": 25355, "sails</w>": 27526, "sain": 21226, "sain</w>": 40378, "sains": 24860, "sainsbury</w>": 45879, "sainsburys</w>": 36934, "saint": 11274, "saint</w>": 5599, "saints</w>": 8769, "saintsfc</w>": 31102, "sair": 46600, "sair</w>": 30971, "saire</w>": 28087, "saison</w>": 33256, "sait": 48008, "saj": 33580, "sak": 11511, "sak</w>": 35900, "saka</w>": 33609, "sake</w>": 12874, "sakh</w>": 43945, "saki</w>": 40514, "saku": 37550, "sakura</w>": 24162, "sal": 980, "sal</w>": 6126, "sala</w>": 17300, "salaam</w>": 46773, "salad</w>": 6188, "salads</w>": 30948, "salah</w>": 22516, "salam": 19007, "salam</w>": 33963, "salamat</w>": 44696, "salami</w>": 46885, "salaries</w>": 33132, "salary</w>": 16312, "salazar</w>": 45988, "sale": 17786, "sale</w>": 1690, "saleh</w>": 38353, "salem": 48194, "salem</w>": 16884, "sales": 13347, "sales</w>": 3765, "salesforce</w>": 22680, "salesman</w>": 37633, "salford</w>": 25629, "sali": 15411, "salim</w>": 42760, "salinas</w>": 41990, "saline</w>": 46918, "salis": 20667, "salis</w>": 39378, "salisbury</w>": 24763, "sall": 27122, "sall</w>": 20883, "salle</w>": 23738, "sally": 29542, "sally</w>": 13349, "salman": 13754, "salman</w>": 16219, "salmankhan</w>": 15177, "salmon": 37040, "salmon</w>": 9137, "salom": 38268, "salon": 33916, "salon</w>": 11105, "saloon</w>": 26038, "sals</w>": 16307, "salsa</w>": 16442, "salt": 12763, "salt</w>": 6611, "salted</w>": 26313, "saltlife</w>": 47809, "salts</w>": 40559, "saltwater</w>": 43616, "salty</w>": 20678, "salu": 31711, "salud</w>": 46867, "salut": 44998, "salute": 44908, "salute</w>": 9747, "salutes</w>": 32762, "salv": 8299, "salvador</w>": 20874, "salvage</w>": 33131, "salvation</w>": 19534, "salvatore</w>": 38772, "salz": 33594, "salzburg</w>": 43396, "sam": 1644, "sam</w>": 3730, "sama</w>": 19272, "samanth": 
11465, "samantha</w>": 15466, "samanthap": 38266, "samanthaprabhu</w>": 38643, "samar": 21820, "samaritan</w>": 45495, "samba</w>": 37190, "same": 23062, "same</w>": 2208, "samheughan</w>": 36255, "sami": 48400, "sami</w>": 24322, "sammy": 31091, "sammy</w>": 16758, "samo": 30006, "samoa</w>": 34932, "samp": 31225, "sample</w>": 9542, "sampler</w>": 40629, "samples</w>": 13387, "sampling</w>": 19522, "sampson</w>": 39983, "sams</w>": 44667, "samson</w>": 34659, "samsun": 47875, "samsung": 35369, "samsung</w>": 8115, "samu": 7646, "samuel": 30612, "samuel</w>": 12787, "samurai</w>": 21739, "san": 1489, "san</w>": 2223, "sana</w>": 19434, "sanantonio</w>": 34714, "sanat": 29091, "sanatomy</w>": 36052, "sanc": 7398, "sance</w>": 15930, "sanchez</w>": 13971, "sanctioned</w>": 43032, "sanctions</w>": 17790, "sanctu": 12712, "sanctuary</w>": 14044, "sand": 2147, "sand</w>": 5094, "sandal": 36445, "sandal</w>": 42185, "sandals</w>": 20731, "sandalwood</w>": 47502, "sandeep</w>": 46973, "sander</w>": 34111, "sanders</w>": 10429, "sanderson</w>": 36198, "sandi": 44249, "sandiego": 45997, "sandiego</w>": 15793, "sandman</w>": 45730, "sando": 35921, "sandoval</w>": 44157, "sandra": 33733, "sandra</w>": 13415, "sandro</w>": 42389, "sands</w>": 5936, "sandstone</w>": 36796, "sandwich": 17050, "sandwich</w>": 8687, "sandwiches</w>": 19667, "sandy": 29679, "sandy</w>": 10355, "sane</w>": 23419, "sanford</w>": 32330, "sanfrancisco</w>": 20254, "sang": 13235, "sang</w>": 11684, "sange": 12466, "sangria</w>": 42665, "sani": 39137, "sani</w>": 34492, "sanitary</w>": 33842, "sanitation</w>": 25414, "saniti": 43987, "sanity</w>": 30517, "sanjay": 31712, "sanjay</w>": 25796, "sanje": 40405, "sanjose</w>": 45971, "sank</w>": 43692, "sano</w>": 34053, "sans</w>": 16982, "sansk": 39689, "sanskrit</w>": 48083, "sant": 8356, "sant</w>": 23120, "santa": 22175, "santa</w>": 4555, "santac": 28876, "santam": 45627, "santana</w>": 27033, "santander</w>": 46476, "santi": 13856, "santiago</w>": 16568, "santo": 29631, "santo</w>": 18400, "santor": 28448, "santorini</w>": 39573, "santos</w>": 16582, "sany": 47679, "sao</w>": 28026, "sap": 8089, "sap</w>": 11591, "sapi": 40016, "sapp": 13427, "sapp</w>": 40729, "sapphire</w>": 22044, "sar": 1808, "sar</w>": 9424, "sara": 37196, "sara</w>": 10063, "sarab": 40716, "sarac": 35722, "sarah": 9086, "sarah</w>": 5327, "saraj": 42592, "sarajevo</w>": 48211, "saras": 20373, "sarasota</w>": 31990, "sarato": 24845, "saratoga</w>": 29496, "sarawak</w>": 47331, "sarcasm</w>": 37246, "sarcastic</w>": 48639, "sardar</w>": 41786, "sarde": 43925, "sardin": 27383, "sardinia</w>": 41025, "sare": 13051, "saree</w>": 30860, "sargent</w>": 34864, "sari": 42327, "sari</w>": 20261, "saries</w>": 47586, "sarkar</w>": 30673, "sarko": 33658, "sarkodie</w>": 42848, "sarmy</w>": 20954, "sart</w>": 33006, "sary</w>": 15398, "sas": 3960, "sas</w>": 5235, "sash</w>": 35656, "sasha": 46078, "sasha</w>": 20894, "sasia</w>": 44751, "sask": 47091, "sask</w>": 30416, "saskat": 17102, "saskatchewan</w>": 23899, "saskatoon</w>": 31128, "sass</w>": 31351, "sassy</w>": 20827, "sat": 1382, "sat</w>": 3279, "sata</w>": 41520, "satan</w>": 19446, "satanic</w>": 38224, "satchel</w>": 45908, "sate</w>": 35749, "satell": 9031, "satellite</w>": 10316, "satellites</w>": 28483, "sath": 29675, "sathletics</w>": 30154, "sati": 7038, "satin</w>": 21803, "sation</w>": 23674, "sations</w>": 31232, "satire</w>": 29875, "satis": 9906, "satisf": 22941, "satisfaction</w>": 19925, "satisfied</w>": 18101, "satisfy</w>": 29444, 
"satisfying</w>": 23755, "sato</w>": 34376, "satu</w>": 45283, "satur": 1634, "saturated</w>": 32466, "saturday": 12537, "saturday</w>": 1748, "saturdaymorning</w>": 29053, "saturdaymotivation</w>": 40843, "saturdays</w>": 18930, "saturn</w>": 17312, "saty": 39426, "sau": 2096, "sau</w>": 19455, "sauce</w>": 5520, "saucer</w>": 42272, "sauces</w>": 40367, "saucy</w>": 46684, "saudi": 24511, "saudi</w>": 8548, "saudiarabia</w>": 28680, "sauer": 46333, "saul": 47623, "saul</w>": 23252, "sault</w>": 40361, "sauna</w>": 35460, "saunders</w>": 23794, "saur</w>": 13227, "saura": 46532, "saurus</w>": 22118, "saus</w>": 36121, "sausage</w>": 11855, "sausages</w>": 31593, "sauté": 36290, "sautéed</w>": 38517, "sauvi": 30116, "sauvignon</w>": 32745, "sav": 2248, "sav</w>": 26533, "sava</w>": 40198, "savag": 43039, "savage</w>": 11859, "savannah</w>": 18662, "save": 5895, "save</w>": 2673, "saved</w>": 7137, "saveour": 33390, "saver</w>": 20987, "savers</w>": 31416, "saves</w>": 12907, "savethe": 18031, "savi": 14721, "saving": 28498, "saving</w>": 6979, "savings</w>": 10651, "savior</w>": 24762, "saviour</w>": 35800, "savor</w>": 48071, "savory</w>": 32992, "savoury</w>": 49071, "savoy</w>": 39552, "savvy</w>": 29278, "saw": 12429, "saw</w>": 2425, "sawa</w>": 39613, "sawards</w>": 29012, "sawyer</w>": 27726, "sax": 14169, "sax</w>": 23766, "saxon</w>": 31856, "saxophon": 43760, "saxophone</w>": 32296, "say": 3047, "say</w>": 1451, "saya</w>": 35170, "sayang</w>": 46322, "sayers</w>": 44116, "sayin</w>": 23662, "saying</w>": 4455, "says</w>": 1563, "saz</w>": 35577, "sb": 5576, "sb</w>": 4977, "sba</w>": 44970, "sback</w>": 43840, "sband</w>": 27539, "sbaseball</w>": 46491, "sbball</w>": 39190, "sbc</w>": 31404, "sberg</w>": 20358, "sbi</w>": 41369, "sbk</w>": 39211, "sboro</w>": 18909, "sbridge</w>": 49228, "sbs</w>": 18883, "sbu": 48075, "sbu</w>": 46281, "sburg</w>": 7390, "sburgh</w>": 48205, "sbury</w>": 14081, "sby": 26519, "sby</w>": 10287, "sc": 663, "sc</w>": 3219, "sca</w>": 11001, "scab": 31716, "scaf": 28981, "scafe</w>": 45574, "scaffolding</w>": 41687, "scal": 10859, "scala</w>": 37997, "scalable</w>": 44084, "scale": 37817, "scale</w>": 5879, "scaled</w>": 41923, "scales</w>": 22891, "scaling</w>": 29116, "scallo": 19936, "scallop</w>": 39544, "scallops</w>": 31430, "scalp</w>": 38898, "scam": 17620, "scam</w>": 13215, "scamp</w>": 28451, "scams</w>": 34395, "scan": 10650, "scan</w>": 11261, "scanada</w>": 27121, "scand": 8110, "scandal": 35420, "scandal</w>": 11622, "scandals</w>": 45490, "scandin": 32014, "scandinavian</w>": 35661, "scanned</w>": 43719, "scanner</w>": 24185, "scanning</w>": 24092, "scans</w>": 31251, "scap": 35883, "scape": 36005, "scape</w>": 12314, "scapes</w>": 31933, "scar": 4171, "scar</w>": 18088, "scarborough</w>": 24254, "scarce</w>": 38572, "scarcity</w>": 45812, "scare": 33536, "scare</w>": 15920, "scarec": 38814, "scarecrow</w>": 46504, "scared</w>": 9870, "scares</w>": 34096, "scarf</w>": 13365, "scari": 27050, "scariest</w>": 37213, "scarlet</w>": 20389, "scarlett</w>": 28325, "scars</w>": 20747, "scarves</w>": 29249, "scary</w>": 9250, "scat": 13899, "scattered</w>": 22090, "scavenger</w>": 36778, "scc</w>": 19458, "scd</w>": 48422, "scen": 2204, "scenario</w>": 20456, "scenarios</w>": 31346, "scence</w>": 33418, "scene</w>": 3562, "scenery</w>": 16025, "scenes</w>": 5415, "scenic</w>": 15394, "scent": 36277, "scent</w>": 7683, "scented</w>": 27190, "scenter</w>": 23059, "scentre</w>": 39371, "scents</w>": 26336, "scep": 24439, "scfc</w>": 38578, 
"sch": 844, "sch</w>": 7542, "scha": 42809, "schaf": 45588, "schaft</w>": 41010, "schal": 35568, "schalke</w>": 41029, "schallenge</w>": 43665, "schan": 31328, "schar": 15085, "schat</w>": 31842, "schau": 35830, "sche": 3038, "sche</w>": 7289, "schedu": 4207, "schedule</w>": 5521, "scheduled</w>": 10986, "schedules</w>": 28986, "scheduling</w>": 32216, "scheer</w>": 26776, "schel": 39881, "schel</w>": 38569, "schem": 17720, "scheme</w>": 9024, "schemes</w>": 22958, "schen": 22738, "scher": 21925, "scher</w>": 21299, "schi": 13731, "schi</w>": 24984, "schicago</w>": 46230, "schiff</w>": 39431, "schild</w>": 32148, "schiz": 33230, "schizoph": 40004, "schizophre": 41163, "schle": 32022, "schmid": 17375, "schmidt</w>": 18463, "schnau": 45745, "schnei": 19941, "schneider</w>": 22972, "schnit": 40903, "scho": 2493, "schoice</w>": 23860, "schol": 4498, "scholar": 7192, "scholar</w>": 12830, "scholarly</w>": 41065, "scholars</w>": 13818, "scholarship</w>": 9070, "scholarships</w>": 17866, "scholastic</w>": 35743, "schoo": 20721, "school": 6063, "school</w>": 1228, "schooled</w>": 44722, "schoolers</w>": 31455, "schooling</w>": 28608, "schools</w>": 3513, "schre": 47685, "schri": 25453, "schro": 32381, "schu": 11318, "schubert</w>": 46939, "schul": 14945, "schultz</w>": 30308, "schulz</w>": 39572, "schumacher</w>": 39208, "schumer</w>": 25313, "schur": 42475, "schwab</w>": 47602, "schwar": 13985, "schwartz</w>": 30617, "schwarz": 27074, "schwarzenegger</w>": 33860, "schwe": 25324, "sci": 2267, "sci</w>": 8309, "sciart</w>": 31704, "scicom": 28606, "scicomm</w>": 29573, "scien": 39261, "science": 10201, "science</w>": 2497, "sciencefiction</w>": 39170, "sciences</w>": 11481, "scienti": 4338, "scientific</w>": 9750, "scientist</w>": 11083, "scientists</w>": 8045, "sciento": 36193, "scientology</w>": 44694, "scifi": 41862, "scifi</w>": 12230, "scion</w>": 47208, "sciss": 25667, "scissors</w>": 30867, "sciutto</w>": 44392, "sclerosis</w>": 39446, "sclub</w>": 20017, "sco": 1065, "sco</w>": 4763, "scoe</w>": 31164, "scol": 13599, "scoll</w>": 44895, "scollege</w>": 39536, "scom</w>": 26407, "scon": 17163, "scon</w>": 29272, "scones</w>": 36443, "sconf</w>": 39704, "scoo": 14199, "scooby</w>": 34469, "scoop</w>": 13829, "scoops</w>": 41360, "scope</w>": 7979, "scopes</w>": 30328, "scopic</w>": 23869, "scopy</w>": 20018, "scor": 8442, "score": 12067, "score</w>": 4431, "scoreboard</w>": 30104, "scorecard</w>": 38128, "scored</w>": 6143, "scoreless</w>": 33469, "scorer</w>": 16572, "scorers</w>": 26699, "scores</w>": 7039, "scoring</w>": 9198, "scorpi": 15445, "scorpio</w>": 34331, "scorpion</w>": 28461, "scorpions</w>": 45401, "scorsese</w>": 45975, "scot": 2496, "scot</w>": 9271, "scotch</w>": 16687, "scoti": 46446, "scotia</w>": 27859, "scotland": 29174, "scotland</w>": 4203, "scots</w>": 17260, "scotsman</w>": 39612, "scott": 7775, "scott</w>": 3664, "scotti": 6227, "scottish": 18039, "scottish</w>": 7442, "scottsdale</w>": 27817, "scotty": 39697, "scotty</w>": 26836, "scotus</w>": 21720, "scou": 44909, "scoun": 16110, "scouncil</w>": 48787, "scountry</w>": 40432, "scour": 46172, "scout": 32213, "scout</w>": 10786, "scouting</w>": 19072, "scouts</w>": 14837, "scow": 27929, "scowboys</w>": 31386, "scp</w>": 45030, "scr</w>": 36131, "scra": 11187, "scrabble</w>": 39488, "scram": 17289, "scramble</w>": 32688, "scrambled</w>": 39026, "scran": 41774, "scranton</w>": 45274, "scrap": 27950, "scrap</w>": 21695, "scrapbook</w>": 48733, "scrapped</w>": 43325, "scraps</w>": 40809, "scrat": 9572, "scratch</w>": 
13258, "scratched</w>": 48831, "scratches</w>": 46556, "scratching</w>": 44617, "scre": 1795, "scream": 31645, "scream</w>": 13239, "screamed</w>": 35427, "screaming</w>": 12891, "screams</w>": 23989, "screen": 5351, "screen</w>": 3750, "screened</w>": 31450, "screening</w>": 6688, "screenings</w>": 27655, "screenplay</w>": 30058, "screens</w>": 12689, "screenshot": 20637, "screenshot</w>": 12646, "screenshots</w>": 26783, "screenshotsaturday</w>": 21406, "screenwriter</w>": 37293, "screenwriting</w>": 35465, "screw": 25529, "screw</w>": 14225, "screwdriver</w>": 48748, "screwed</w>": 30592, "screws</w>": 38292, "scri": 2139, "scrib": 34259, "scribe</w>": 36228, "scribed</w>": 38334, "scricket</w>": 45947, "scrim": 21978, "scrimmage</w>": 25216, "scrip": 11955, "script</w>": 8374, "scripted</w>": 40513, "scription</w>": 26604, "scriptions</w>": 39512, "scripts</w>": 20109, "scripture</w>": 27186, "scro": 30768, "scroll</w>": 24160, "scrolling</w>": 28889, "scrolls</w>": 38113, "scroo": 42263, "scru": 7589, "scrub</w>": 23432, "scrubs</w>": 37919, "scrum</w>": 29047, "scrump": 39791, "scrumptious</w>": 40987, "scrutiny</w>": 34305, "scs</w>": 26853, "sct</w>": 39284, "scu": 8181, "scu</w>": 32135, "scuba": 39053, "scuba</w>": 20559, "scubadiving</w>": 49046, "scue</w>": 25955, "scul": 4948, "scully</w>": 36598, "sculp": 6093, "sculpt</w>": 45044, "sculpted</w>": 41296, "sculpting</w>": 44389, "sculptor</w>": 29409, "sculpture</w>": 8757, "sculptures</w>": 20378, "scum</w>": 29655, "scumb": 44525, "scup</w>": 21506, "scur": 32742, "scwx</w>": 41966, "scy": 27471, "sd": 3080, "sd</w>": 4159, "sda</w>": 25548, "sdale</w>": 12327, "sday": 5902, "sday</w>": 1376, "sdays</w>": 14491, "sdc</w>": 40992, "sdcc</w>": 13246, "sden</w>": 17241, "sdf</w>": 34681, "sdg</w>": 20177, "sdgs</w>": 16261, "sdk</w>": 40015, "sdlive</w>": 34561, "sdn</w>": 41925, "sdsu</w>": 41284, "se": 567, "se</w>": 611, "sea": 5970, "sea</w>": 2102, "seab": 15728, "seabir": 42558, "seac": 35626, "seaf": 9336, "seafood</w>": 12472, "seag": 15730, "seagu": 38076, "seagull</w>": 38858, "seagulls</w>": 42215, "seahawks</w>": 15341, "seal": 21381, "seal</w>": 10159, "sealed</w>": 13358, "sealing</w>": 42992, "seals</w>": 18179, "seam": 13710, "seam</w>": 44201, "seaman</w>": 47513, "seamless</w>": 29373, "seamus</w>": 40175, "sean": 11406, "sean</w>": 6077, "seanhannity</w>": 43316, "seap": 29983, "seaport</w>": 46418, "sear": 1612, "search": 23129, "search</w>": 1920, "searched</w>": 28961, "searches</w>": 26378, "searching</w>": 10626, "seared</w>": 29727, "sears</w>": 26693, "seas": 7329, "seas</w>": 9556, "seascape</w>": 42593, "seaside</w>": 18867, "season": 19288, "season</w>": 1367, "seasonal</w>": 14215, "seasoned</w>": 28399, "seasoning</w>": 43439, "seasons</w>": 8635, "seat": 19670, "seat</w>": 4922, "seated</w>": 23953, "seater</w>": 37543, "seating</w>": 16240, "seats</w>": 6944, "seattle": 24388, "seattle</w>": 6274, "seau</w>": 32263, "seaw": 32658, "seaweed</w>": 30204, "seaworld</w>": 27422, "seb": 35766, "seb</w>": 25171, "sebasti": 10324, "sebastian": 43792, "sebastian</w>": 13181, "sebring</w>": 41086, "sec": 2875, "sec</w>": 5338, "seca</w>": 37847, "secco</w>": 27394, "sece": 46297, "seclu": 42392, "secon": 1846, "second": 9329, "second</w>": 2241, "secondary</w>": 13107, "seconds</w>": 6541, "secre": 2460, "secret": 20710, "secret</w>": 4145, "secretari": 29515, "secretariat</w>": 31767, "secretary</w>": 6552, "secretly</w>": 21400, "secrets</w>": 9735, "secs</w>": 28665, "sect</w>": 15772, "section": 
34986, "section</w>": 4853, "sectional</w>": 21876, "sections</w>": 20061, "sector</w>": 6579, "sectors</w>": 22173, "secu": 4894, "secular": 47483, "secular</w>": 27560, "secur": 2557, "secure": 44763, "secure</w>": 7515, "secured</w>": 16848, "secures</w>": 31567, "securing</w>": 24759, "securities</w>": 25080, "security": 31245, "security</w>": 2741, "sed": 14034, "sed</w>": 1252, "sedan</w>": 24237, "sedg": 46926, "sedge</w>": 45288, "sedi": 29269, "sedly</w>": 31771, "sedona</w>": 46862, "seduc": 19933, "seductive</w>": 43721, "see": 1751, "see</w>": 862, "seed": 14064, "seed</w>": 6488, "seeded</w>": 33688, "seeding</w>": 40050, "seedlings</w>": 47933, "seeds</w>": 9128, "seeing</w>": 3214, "seek</w>": 8839, "seeker</w>": 28011, "seekers</w>": 20732, "seeking</w>": 8592, "seeks</w>": 12594, "seem": 20043, "seem</w>": 7523, "seemed</w>": 17240, "seemingly</w>": 25917, "seems</w>": 4453, "seen": 36273, "seen</w>": 2041, "seer</w>": 32486, "sees</w>": 7594, "seeyou": 41279, "sef</w>": 27453, "seg": 10551, "sega</w>": 16122, "segment</w>": 15615, "segments</w>": 43053, "segreg": 49117, "segregation</w>": 39086, "segu": 33156, "segun": 43087, "seh": 27536, "seh</w>": 41430, "sehun</w>": 17705, "sei": 13130, "sei</w>": 15907, "sein</w>": 24669, "seine</w>": 41378, "seinfeld</w>": 33706, "seis": 25559, "seismic</w>": 38459, "seiz": 22171, "seize</w>": 26624, "seized</w>": 15826, "seizure</w>": 36804, "seizures</w>": 47199, "sek": 45515, "sek</w>": 25880, "sel": 1000, "sel</w>": 4098, "sela</w>": 47006, "selamat</w>": 37692, "selangor</w>": 44402, "selby</w>": 43546, "selca</w>": 38606, "selcaday</w>": 35924, "seldom</w>": 48322, "sele": 29137, "selec": 3014, "select</w>": 8690, "selected</w>": 6881, "selecting</w>": 32696, "selection</w>": 6724, "selections</w>": 24099, "selective</w>": 28686, "selects</w>": 32902, "selen": 19970, "selena</w>": 14677, "selenagomez</w>": 27653, "seley</w>": 30556, "self": 10139, "self</w>": 1322, "selfcare</w>": 39560, "selfi": 3007, "selfie": 26735, "selfie</w>": 3666, "selfies": 46058, "selfies</w>": 10050, "selfish</w>": 26907, "selfless</w>": 34236, "sell": 10279, "sell</w>": 5119, "seller</w>": 11779, "sellers</w>": 16562, "selling</w>": 4396, "sells</w>": 14306, "selma</w>": 36652, "sels</w>": 42070, "selves</w>": 4505, "sely</w>": 8402, "sem": 8645, "sem</w>": 17106, "sema</w>": 31816, "seman": 29119, "seman</w>": 28378, "semana</w>": 41780, "semb": 36054, "seme": 10855, "sement</w>": 10714, "sements</w>": 31449, "semester</w>": 11905, "semi": 11023, "semi</w>": 6684, "semic": 26967, "semicon": 34315, "semiconduc": 35646, "semiconductor</w>": 43551, "semifinal</w>": 22935, "semifinals</w>": 21863, "semin": 5595, "seminar</w>": 7269, "seminars</w>": 34870, "seminary</w>": 31655, "seminole</w>": 42956, "semis</w>": 24013, "semit": 22628, "semite</w>": 23721, "semitic</w>": 34894, "semitism</w>": 25911, "semper": 47391, "sen": 1057, "sen</w>": 2249, "sena</w>": 21584, "senate": 30703, "senate</w>": 6843, "senator": 20871, "senator</w>": 8495, "senators</w>": 16889, "send": 27684, "send</w>": 3625, "sending</w>": 6985, "sends</w>": 10817, "sene": 25269, "seneca</w>": 33419, "senegal</w>": 28255, "senew": 49313, "seng": 43022, "seng</w>": 29971, "senior": 19865, "senior</w>": 3415, "seniors</w>": 8138, "senna</w>": 36195, "senpai</w>": 46562, "sens": 5218, "sens</w>": 22837, "sensation</w>": 19383, "sensational</w>": 23051, "sense": 29162, "sense</w>": 4747, "sensei</w>": 36158, "senses</w>": 21809, "sensi": 38802, "sensible</w>": 30635, "sensing</w>": 
29236, "sensiti": 20531, "sensitive</w>": 13734, "sensitivity</w>": 27788, "sensor</w>": 15330, "sensors</w>": 20356, "sensory</w>": 21831, "sensu": 28157, "sensual</w>": 40860, "sent": 6200, "sent</w>": 3676, "sentence</w>": 12737, "sentenced</w>": 17773, "sentences</w>": 25858, "sentencing</w>": 34394, "senti": 19042, "sentim": 25102, "sentiment</w>": 25949, "sentimental</w>": 40070, "sentiments</w>": 47450, "sentin": 20042, "sentinel</w>": 23123, "senting</w>": 3924, "seo": 24743, "seo</w>": 8622, "seok": 34697, "seok</w>": 22482, "seokjin</w>": 45584, "seoul</w>": 13253, "sep": 3212, "sep</w>": 10434, "separ": 6859, "separate</w>": 13886, "separated</w>": 22163, "separately</w>": 41904, "separates</w>": 45365, "separati": 39377, "separating</w>": 43480, "separation</w>": 22007, "sephora</w>": 38414, "sepsis</w>": 40205, "sept</w>": 5380, "septe": 3672, "september</w>": 3707, "septic</w>": 34690, "sepul": 47360, "seq</w>": 44379, "sequ": 5491, "seque": 44662, "sequel</w>": 15701, "sequence</w>": 18833, "sequences</w>": 47306, "sequencing</w>": 33484, "sequo": 32781, "sequoia</w>": 42404, "ser": 803, "ser</w>": 2771, "sera</w>": 28250, "serbia</w>": 19038, "serbian</w>": 33687, "sere": 35770, "seren": 7880, "serena</w>": 19519, "serenawilliams</w>": 48316, "serendip": 45805, "serendipity</w>": 49386, "serene</w>": 28269, "serenity</w>": 24187, "serge": 13477, "serge</w>": 35700, "sergeant</w>": 22049, "sergei</w>": 39870, "sergey</w>": 35390, "sergi": 47675, "sergio</w>": 18359, "seri": 2763, "seri</w>": 37509, "serial</w>": 14216, "serie</w>": 19752, "seriea</w>": 32660, "series</w>": 1857, "serious": 47421, "serious</w>": 4770, "seriously</w>": 4885, "sermon</w>": 24884, "sero": 48883, "serpent": 37084, "serpent</w>": 35364, "serra</w>": 39851, "serrano</w>": 44236, "sers</w>": 13509, "serum</w>": 25385, "serv": 1297, "serv</w>": 24571, "servant</w>": 20810, "servants</w>": 29652, "serve": 39202, "serve</w>": 2838, "served</w>": 4740, "server": 36458, "server</w>": 8398, "serverless</w>": 49243, "servers</w>": 22262, "serves</w>": 9915, "servic": 27115, "service": 21496, "service</w>": 2086, "serviced</w>": 44687, "services</w>": 3100, "servicing</w>": 41300, "serving</w>": 5722, "sery</w>": 14279, "ses": 23708, "ses</w>": 1386, "sesame</w>": 21706, "sese</w>": 37128, "sesh</w>": 24274, "session</w>": 2550, "sessions</w>": 6327, "set": 7965, "set</w>": 1167, "setback</w>": 43605, "seth": 20005, "seth</w>": 11870, "sethu": 38933, "setlist</w>": 33141, "seton</w>": 43799, "sets</w>": 4650, "sett": 4984, "sett</w>": 17567, "sette</w>": 14613, "setter</w>": 23153, "settes</w>": 44145, "setti</w>": 45170, "setting</w>": 5264, "settings</w>": 18628, "settle</w>": 15075, "settled</w>": 18310, "settlement</w>": 16494, "settlements</w>": 36605, "settlers</w>": 35671, "settles</w>": 41498, "settling</w>": 22036, "setup</w>": 11092, "seu</w>": 31539, "seul": 48975, "seum</w>": 18838, "seun": 24209, "seung": 32393, "seung</w>": 33711, "seungri</w>": 41627, "seuss</w>": 34441, "sev": 26585, "sev</w>": 37600, "seva</w>": 42604, "seve": 21458, "seve</w>": 22468, "sevel": 17439, "seven": 7874, "seven</w>": 5757, "sevens</w>": 29911, "sevent": 43048, "seventeen</w>": 19337, "seventh</w>": 17568, "seventy</w>": 47170, "sever": 3250, "sever</w>": 45557, "several</w>": 5560, "severance</w>": 26194, "severe</w>": 6215, "severely</w>": 24417, "severn</w>": 34626, "severy": 34207, "sevilla</w>": 24947, "seville</w>": 34988, "sew</w>": 28640, "sewage</w>": 32777, "sewer</w>": 28294, "sewing</w>": 15974, 
"sewn</w>": 42118, "sex": 3548, "sex</w>": 5937, "sexi": 20562, "sexiest</w>": 25426, "sexism</w>": 32059, "sexist</w>": 33047, "sexu": 14741, "sexual</w>": 6749, "sexuality</w>": 21244, "sexually</w>": 23032, "sexy": 21019, "sexy</w>": 38127, "sey": 6317, "sey</w>": 2258, "seychel": 36809, "seychelles</w>": 38519, "seye</w>": 35604, "seym": 22657, "seymour</w>": 25850, "seys</w>": 15081, "sez</w>": 42377, "señ": 43368, "sf": 4435, "sf</w>": 4915, "sfa</w>": 32675, "sfam</w>": 37649, "sfb</w>": 27930, "sfc</w>": 14129, "sfest</w>": 49024, "sff</w>": 42056, "sfgiants</w>": 20923, "sfield</w>": 11801, "sfo</w>": 39182, "sfootball</w>": 45259, "sfor": 9115, "sford</w>": 28917, "sforsale</w>": 28888, "sfw</w>": 18073, "sfx</w>": 37995, "sg": 9599, "sg</w>": 7611, "sga</w>": 33049, "sgate</w>": 27558, "sgh</w>": 47590, "sgo": 5393, "sgo</w>": 21044, "sgt</w>": 13748, "sh": 552, "sh</w>": 849, "sha": 1514, "sha</w>": 3337, "shaa</w>": 44221, "shab": 8323, "shabbat</w>": 38042, "shabby</w>": 28838, "shack": 23866, "shack</w>": 18785, "shad": 3182, "shad</w>": 23874, "shade": 34554, "shade</w>": 10097, "shaded</w>": 43506, "shades": 46608, "shades</w>": 9270, "shadesof": 45180, "shading</w>": 37348, "shado": 9325, "shadow": 15243, "shadow</w>": 7068, "shadowhun": 19931, "shadowhunters</w>": 24834, "shadowing</w>": 46092, "shadows</w>": 12971, "shady</w>": 22158, "shaf": 12032, "shaft</w>": 21545, "shag": 22439, "shaggy</w>": 42662, "shah": 13203, "shah</w>": 8439, "shahe": 23643, "shaheed</w>": 30060, "shaheer</w>": 43969, "shahi</w>": 46972, "shahid": 25696, "shahid</w>": 27138, "shahidkapoor</w>": 29892, "shahzad</w>": 45915, "shai</w>": 47941, "shaikh</w>": 45712, "shail": 37603, "shair</w>": 43135, "shak": 8385, "shake": 8206, "shake</w>": 8251, "shaken</w>": 38237, "shaker</w>": 26210, "shakers</w>": 38411, "shakes</w>": 19668, "shakespe": 9890, "shakespeare": 22499, "shakespeare</w>": 12488, "shakespearesunday</w>": 32320, "shaking</w>": 19101, "shakira</w>": 40795, "shakti": 48593, "shakti</w>": 32458, "shakur</w>": 48915, "shal": 15056, "shal</w>": 28175, "shale</w>": 32864, "shall</w>": 4742, "shallow</w>": 23730, "shalom</w>": 31339, "sham": 6453, "sham</w>": 9005, "shaman</w>": 48727, "shambles</w>": 40799, "shame": 14776, "shame</w>": 7593, "shameful</w>": 28283, "shameless</w>": 25380, "shaming</w>": 40553, "shampoo</w>": 23944, "shamrock</w>": 34199, "shan": 5171, "shan</w>": 8834, "shana</w>": 44835, "shand": 29101, "shane": 26863, "shane</w>": 11572, "shang": 11141, "shanghai</w>": 12742, "shani": 46665, "shank</w>": 24685, "shankar</w>": 24108, "shann": 9932, "shannon": 22842, "shannon</w>": 13581, "shant": 36610, "shap": 5581, "shape": 26925, "shape</w>": 6448, "shaped</w>": 10127, "shapes</w>": 15377, "shaping</w>": 18632, "shapiro</w>": 32110, "shaq": 46402, "shaq</w>": 26843, "shar": 1669, "shar</w>": 36542, "shara</w>": 48849, "sharapo": 36489, "sharapova</w>": 36671, "shard</w>": 42207, "share": 7585, "share</w>": 1978, "shared</w>": 5368, "shareholder</w>": 38241, "shareholders</w>": 34778, "sharepoint</w>": 39213, "shares</w>": 4974, "sharethe": 49277, "shareyour": 45890, "shari": 27738, "shari</w>": 47390, "sharia</w>": 37244, "sharif</w>": 15501, "sharing</w>": 3567, "sharjah</w>": 33420, "shark": 15836, "shark</w>": 7980, "sharks</w>": 10047, "sharkweek</w>": 39571, "sharma</w>": 10105, "sharon": 28722, "sharon</w>": 14138, "sharp": 17126, "sharp</w>": 8157, "sharpe</w>": 34374, "sharpen": 41465, "sharpie</w>": 46858, "sharply</w>": 37185, "shasta</w>": 46727, "shat": 
12169, "shat</w>": 44388, "shatter</w>": 45008, "shattered</w>": 26820, "shau": 13750, "shaun": 23446, "shaun</w>": 16669, "shav": 11410, "shave</w>": 17735, "shaved</w>": 25571, "shaving</w>": 24261, "shaw": 6122, "shaw</w>": 6805, "shawa</w>": 46413, "shawl</w>": 35132, "shawn": 16677, "shawn</w>": 10970, "shawnee</w>": 48060, "shawnmendes</w>": 27277, "shawty</w>": 38026, "shay": 10778, "shay</w>": 18361, "shaykh</w>": 47223, "shaz": 18618, "shazam</w>": 29063, "shc</w>": 43419, "shd</w>": 37729, "she": 1729, "she</w>": 1043, "shea</w>": 20407, "shead": 44287, "shead</w>": 20434, "shealth": 41743, "shealth</w>": 22197, "shear": 27974, "shear</w>": 32108, "shearer</w>": 40505, "sheath</w>": 45637, "shed": 16586, "shed</w>": 1492, "shedding</w>": 33608, "sheds</w>": 25921, "shee": 23450, "shee</w>": 34321, "sheed</w>": 26105, "sheehan</w>": 41809, "sheen</w>": 25025, "sheep": 23604, "sheep</w>": 9629, "sheer</w>": 17577, "sheeran</w>": 18561, "sheet</w>": 7298, "sheets</w>": 12744, "shef": 8237, "sheff": 38844, "sheff</w>": 43821, "sheffiel": 26940, "sheffield": 41763, "sheffield</w>": 10420, "sheffieldissuper</w>": 33628, "sheh": 31667, "sheikh</w>": 15031, "sheil": 42765, "sheila</w>": 25734, "shek</w>": 33285, "shel": 3159, "shelby": 36906, "shelby</w>": 16885, "sheldon</w>": 25079, "shelf</w>": 10955, "shell": 23374, "shell</w>": 6648, "shelley</w>": 22497, "shelling</w>": 43166, "shells</w>": 19265, "shelly</w>": 37461, "shelter</w>": 8599, "sheltered</w>": 48070, "shelters</w>": 24312, "shelton</w>": 24471, "shelves</w>": 16225, "shem</w>": 40299, "shen": 10154, "shen</w>": 31098, "shenan": 20965, "shenando": 44666, "shenanigans</w>": 26590, "shenko</w>": 39751, "shenmue</w>": 48279, "shenzhen</w>": 38970, "shep": 33757, "shep</w>": 44857, "shepard</w>": 26810, "shepher": 11008, "shepherd</w>": 13242, "shepherds</w>": 42792, "sheppard</w>": 37304, "sher": 3570, "sher</w>": 4510, "sheraton</w>": 39400, "shere</w>": 21507, "sheri": 9235, "sheridan</w>": 27085, "sheriff</w>": 10309, "sherlock</w>": 17294, "sherman</w>": 17822, "sherry": 44348, "sherry</w>": 24689, "shers</w>": 14141, "sherwood</w>": 24527, "sheryl</w>": 39773, "shes": 45514, "shes</w>": 2502, "shet": 15850, "shetland</w>": 29595, "shetty</w>": 25533, "shev": 45182, "sheva</w>": 45132, "shh</w>": 35025, "shhh</w>": 36932, "shi": 823, "shi</w>": 3533, "shia</w>": 23791, "shibu": 36177, "shibuya</w>": 41623, "shie</w>": 26638, "shiel": 33413, "shield</w>": 8670, "shields</w>": 19085, "shies</w>": 35312, "shif": 35317, "shift": 43767, "shift</w>": 6905, "shifted</w>": 34429, "shifter</w>": 48944, "shifting</w>": 21992, "shifts</w>": 23957, "shik</w>": 36980, "shil": 14370, "shill": 32121, "shill</w>": 30090, "shilpa": 47062, "shilpa</w>": 40690, "shim": 11986, "shim</w>": 32780, "shima</w>": 14382, "shimano</w>": 48904, "shimi</w>": 40517, "shimmer</w>": 38792, "shin": 5664, "shin</w>": 11784, "shinde</w>": 41516, "shine": 17582, "shine</w>": 3780, "shinee</w>": 19660, "shines</w>": 16015, "shing": 38641, "shing</w>": 1743, "shining</w>": 10485, "shino": 43074, "shiny</w>": 12190, "ship": 7645, "ship</w>": 1158, "shipment</w>": 28553, "shipp": 34709, "shipped</w>": 15279, "shippers</w>": 44789, "shipping</w>": 5721, "ships</w>": 3262, "shipwreck</w>": 48878, "shipy": 26828, "shipyard</w>": 31273, "shir": 1956, "shiraz</w>": 35618, "shire": 11975, "shire</w>": 2968, "shirehour</w>": 32456, "shirley</w>": 18189, "shiro</w>": 26048, "shirt": 27576, "shirt</w>": 2523, "shirtless</w>": 28959, "shirts</w>": 5803, "shistory</w>": 
34979, "shiv": 18042, "shiv</w>": 37121, "shiva": 33881, "shiva</w>": 21174, "shka</w>": 38944, "shld</w>": 49359, "shma</w>": 48074, "shment</w>": 8802, "shments</w>": 18822, "sho": 719, "sho</w>": 13756, "shock": 19617, "shock</w>": 8736, "shocked</w>": 15787, "shocker</w>": 37971, "shockey</w>": 22258, "shocking</w>": 13394, "shocks</w>": 31886, "shoe": 16308, "shoe</w>": 7342, "shoes": 49391, "shoes</w>": 4079, "shol": 21472, "sholm</w>": 44139, "shome</w>": 42701, "shon": 19526, "shon</w>": 37621, "shone</w>": 47173, "shoo": 1975, "shook</w>": 20730, "shoops</w>": 29956, "shoot": 12531, "shoot</w>": 3704, "shooter</w>": 13645, "shooters</w>": 31902, "shooting</w>": 3992, "shootings</w>": 26753, "shootout</w>": 20666, "shoots</w>": 14144, "shop": 5738, "shop</w>": 1557, "shopify</w>": 47949, "shoplocal</w>": 21775, "shopp": 38486, "shoppe</w>": 38236, "shopped</w>": 28088, "shopper</w>": 24346, "shoppers</w>": 22316, "shopping": 42101, "shopping</w>": 4266, "shops</w>": 6467, "shopsmall</w>": 35942, "shor": 3209, "shore": 14717, "shore</w>": 5928, "shored": 33140, "shoreditch</w>": 35042, "shoreline</w>": 34807, "shores</w>": 18102, "short": 6803, "short</w>": 3005, "shortage</w>": 19910, "shortages</w>": 38730, "shortcuts</w>": 45793, "shorten</w>": 41711, "shorter</w>": 20350, "shortest</w>": 33717, "shortfilm</w>": 37204, "shorth": 37397, "shortlist</w>": 28163, "shortlisted</w>": 20631, "shortly</w>": 11967, "shorts</w>": 9680, "shorty</w>": 33502, "shot": 9805, "shot</w>": 2000, "shotel</w>": 42365, "shotgun</w>": 21643, "shots</w>": 5342, "shou": 3890, "shoul": 29847, "should": 14947, "should</w>": 1535, "shoulder</w>": 8476, "shoulders</w>": 18738, "shouldn</w>": 9416, "shour</w>": 20025, "shouse</w>": 28671, "shout": 7335, "shout</w>": 5214, "shouted</w>": 44397, "shouting</w>": 26464, "shoutout</w>": 8274, "shouts</w>": 26709, "shovel</w>": 31778, "show": 2133, "show</w>": 1080, "showbiz</w>": 34156, "showcas": 14290, "showcase</w>": 7265, "showcased</w>": 35786, "showcases</w>": 26266, "showcasing</w>": 17036, "showdown</w>": 15576, "showed</w>": 7150, "shower</w>": 7777, "showers</w>": 9893, "showing</w>": 3649, "shown</w>": 8506, "showroom</w>": 16821, "shows</w>": 2665, "showtime": 40576, "showtime</w>": 15442, "showyour": 46733, "shp</w>": 38341, "shq</w>": 21145, "shr": 10118, "shra": 21360, "shradd": 28172, "shraddha": 35208, "shraddhakapoor</w>": 40385, "shre": 12101, "shred": 19756, "shred</w>": 33017, "shredded</w>": 31772, "shredding</w>": 45534, "shree</w>": 37410, "shrek</w>": 35009, "shrews": 26411, "shrewsbury</w>": 30921, "shri": 8838, "shri</w>": 11424, "shrimp</w>": 12727, "shrin": 24865, "shrine</w>": 16156, "shrink</w>": 34957, "shrinking</w>": 41243, "shrm</w>": 44163, "shro": 15259, "shroff</w>": 32081, "shrop": 22630, "shropshire</w>": 26344, "shru": 14911, "shrub</w>": 41464, "shrubs</w>": 47975, "shrun": 46767, "shs</w>": 16184, "sht</w>": 44210, "shti</w>": 38927, "shu": 2872, "shu</w>": 17651, "shua</w>": 33771, "shub</w>": 40552, "shud</w>": 45782, "shuff": 42641, "shuffle</w>": 21681, "shui</w>": 45473, "shuk": 29927, "shukla</w>": 46829, "shul": 30721, "shum": 37383, "shun": 24479, "shun</w>": 39594, "shur": 41032, "shut": 8702, "shut</w>": 8282, "shutdown</w>": 16051, "shutout</w>": 24385, "shuts</w>": 28313, "shutt": 31866, "shutter": 36235, "shutter</w>": 33902, "shutters</w>": 46894, "shutting</w>": 31383, "shuttle</w>": 15842, "shwar</w>": 41640, "shy": 22678, "shy</w>": 9682, "si": 564, "si</w>": 2990, "sia</w>": 2357, "siam": 29686, 
"siam</w>": 48248, "siamese</w>": 43161, "sian": 28510, "sian</w>": 6221, "sians</w>": 26583, "sias</w>": 28645, "siber": 22206, "siberia</w>": 39969, "siberian</w>": 34058, "sibl": 14338, "sible</w>": 14507, "sibling": 43060, "sibling</w>": 23779, "siblings</w>": 17156, "sic": 8278, "sic</w>": 1118, "sica</w>": 34125, "sical</w>": 33875, "sichuan</w>": 48950, "sicilian</w>": 45292, "sicily</w>": 23179, "sick": 11143, "sick</w>": 5359, "sickest</w>": 47972, "sickle</w>": 41459, "sickness</w>": 28898, "sics</w>": 26297, "sid": 10117, "sid</w>": 15119, "sidd": 19842, "siddi": 35227, "side": 5869, "side</w>": 1145, "sided</w>": 21061, "sidekick</w>": 44683, "sidel": 43557, "sideline</w>": 32056, "sidelines</w>": 31046, "sider</w>": 30581, "siders</w>": 41249, "sides</w>": 7578, "sideshow</w>": 46789, "sidewalk</w>": 23278, "sidewalks</w>": 43583, "sideways</w>": 35593, "siding</w>": 38758, "sidney</w>": 22598, "sie": 8533, "sie</w>": 5685, "sieg": 49203, "siege</w>": 18460, "siegel</w>": 48559, "siem": 18434, "siemens</w>": 30147, "siempre</w>": 44030, "siena</w>": 33336, "sienna</w>": 40373, "sier": 10028, "sier</w>": 7444, "sierra</w>": 13552, "siers</w>": 35923, "sies</w>": 16367, "siest</w>": 18323, "sif</w>": 29300, "sig": 872, "sig</w>": 19145, "sigh": 36303, "sigh</w>": 15505, "sighs</w>": 44579, "sight": 16897, "sight</w>": 6329, "sighted</w>": 33034, "sighting</w>": 17507, "sightings</w>": 30004, "sights</w>": 17364, "sightseeing</w>": 34210, "sigma": 45075, "sigma</w>": 15697, "sign": 5538, "sign</w>": 2292, "signage</w>": 21156, "signal</w>": 10781, "signaling</w>": 38492, "signalling</w>": 48426, "signals</w>": 17150, "signation</w>": 24347, "signature</w>": 9189, "signatures</w>": 21865, "signed</w>": 3163, "signee</w>": 39778, "signi": 34023, "signific": 6374, "significance</w>": 23769, "significant</w>": 8735, "significantly</w>": 16187, "signing</w>": 4401, "signingday</w>": 40282, "signings</w>": 27731, "signs</w>": 4659, "signup</w>": 40791, "sigue</w>": 49401, "sii</w>": 36672, "sik": 19974, "sik</w>": 22413, "sika</w>": 31144, "sikh</w>": 21829, "sikhs</w>": 45426, "sil": 1556, "sil</w>": 8315, "sila</w>": 41754, "sile": 37620, "silen": 39048, "silence</w>": 8462, "silenced</w>": 45415, "silent": 30352, "silent</w>": 8487, "silently</w>": 42640, "silhou": 20589, "silhouette</w>": 26149, "silic": 23830, "silicon": 32412, "silicon</w>": 17888, "silicone</w>": 28221, "silk": 25891, "silk</w>": 9743, "silky</w>": 29554, "sill": 42468, "sill</w>": 48024, "silly</w>": 11883, "silon</w>": 31841, "sils</w>": 39708, "silva</w>": 16489, "silve": 37697, "silver": 7525, "silver</w>": 3467, "silverado</w>": 46160, "silverstone</w>": 29666, "silvia</w>": 37289, "sim": 5026, "sim</w>": 10740, "sima</w>": 35871, "simba</w>": 39492, "simcoe</w>": 47148, "sime": 28329, "simi</w>": 38073, "simil": 7202, "similar</w>": 8547, "similarities</w>": 34716, "simm": 13001, "simmons</w>": 14699, "simo</w>": 37171, "simon": 8796, "simon</w>": 6668, "simona</w>": 46277, "simone</w>": 19062, "simons</w>": 33097, "simp": 2542, "simple": 19018, "simple</w>": 4129, "simpler</w>": 35489, "simplest</w>": 39588, "simpli": 16868, "simplicity</w>": 21262, "simplified</w>": 36647, "simplify</w>": 35479, "simply": 25637, "simply</w>": 6151, "simpson": 41805, "simpson</w>": 11750, "simpsons</w>": 21092, "sims</w>": 14021, "simul": 9845, "simulated</w>": 46395, "simulation</w>": 18610, "simulator</w>": 20821, "simultaneous</w>": 48816, "simultaneously</w>": 28575, "sin": 1303, "sin</w>": 3421, "sina</w>": 19541, 
"sinai</w>": 33226, "sinatra</w>": 27262, "sinc</w>": 30464, "since</w>": 1855, "sincere": 24513, "sincere</w>": 24886, "sincerely</w>": 25673, "sinclair</w>": 23100, "sind": 39598, "sind</w>": 30877, "sindh</w>": 20754, "sindia</w>": 48038, "sine": 22741, "sine</w>": 33793, "sinfo</w>": 47178, "sing": 1387, "sing</w>": 1197, "singapo": 27861, "singapore": 28879, "singapore</w>": 6754, "singer": 33880, "singer</w>": 5108, "singers</w>": 15613, "singersongwriter</w>": 44585, "singh": 19445, "singh</w>": 5715, "singing</w>": 5864, "single": 19524, "single</w>": 2688, "singles</w>": 12025, "singleton</w>": 46247, "singly</w>": 16619, "sings</w>": 13635, "singul": 34003, "singular</w>": 44009, "singularity</w>": 48410, "sinha</w>": 29416, "sini": 41781, "sini</w>": 26319, "sinister</w>": 31313, "sink": 37232, "sink</w>": 14551, "sinking</w>": 27949, "sinks</w>": 32710, "sinn</w>": 36315, "sinner</w>": 45380, "sinners</w>": 43436, "sino</w>": 29759, "sins</w>": 9345, "sinthe": 30737, "sinu": 37351, "sinus</w>": 47535, "sio</w>": 10807, "siob": 40954, "siology</w>": 46315, "sion": 5676, "sion</w>": 1015, "sional</w>": 14533, "sionally</w>": 30754, "sions</w>": 4060, "sioux": 44695, "sioux</w>": 24954, "sip</w>": 16096, "sipping</w>": 28527, "sir": 10708, "sir</w>": 3846, "sire</w>": 28450, "siren</w>": 33026, "sirens</w>": 35907, "siri": 13986, "siri</w>": 18394, "sirius": 23574, "sirius</w>": 34999, "siriusxm</w>": 29833, "sirloin</w>": 46828, "sis": 18132, "sis</w>": 2580, "sisd</w>": 27132, "sisi</w>": 37892, "siss": 42929, "sissy</w>": 27564, "sist</w>": 20520, "sista</w>": 37448, "sister": 17417, "sister</w>": 3677, "sisterhood</w>": 37313, "sisters</w>": 6404, "sit": 7387, "sit</w>": 4037, "sitcom</w>": 30426, "site": 26792, "site</w>": 1988, "sites</w>": 7236, "sith</w>": 41499, "sito</w>": 42613, "sits</w>": 12726, "sitt": 42988, "sitter</w>": 40777, "sittin</w>": 40887, "sitting</w>": 4919, "situ": 5562, "situ</w>": 42536, "situated</w>": 22030, "situation</w>": 7144, "situations</w>": 19096, "sity": 38177, "sity</w>": 5477, "siu</w>": 40174, "sium</w>": 8090, "sius</w>": 27595, "siva</w>": 20991, "sivan</w>": 36931, "sive": 23572, "sive</w>": 1875, "sively</w>": 10343, "siveness</w>": 39667, "sives</w>": 23896, "sivity</w>": 42738, "siwon</w>": 29055, "six": 5968, "six</w>": 4093, "sixers</w>": 25941, "sixteen</w>": 28677, "sixth</w>": 12909, "sixties</w>": 44948, "sixty</w>": 32588, "siya</w>": 44440, "size": 38377, "size</w>": 3235, "sized</w>": 9832, "sizes</w>": 10253, "sizing</w>": 28330, "sizz": 23778, "sizzle</w>": 47890, "sizzling</w>": 35799, "sj": 7536, "sj</w>": 16010, "sjo": 42012, "sk": 909, "sk</w>": 2058, "ska</w>": 7495, "skag": 31948, "skan": 46772, "skar": 27587, "skar</w>": 26835, "skate": 13740, "skate</w>": 12745, "skateboard</w>": 31777, "skateboarding</w>": 31352, "skater</w>": 30337, "skaters</w>": 39824, "skates</w>": 31479, "skc</w>": 44551, "ske": 6261, "ske</w>": 25516, "skel": 36564, "skelet": 27075, "skeletal</w>": 37369, "skeleton</w>": 20062, "skeletons</w>": 48874, "skell</w>": 40801, "skep": 27772, "skeptical</w>": 44934, "sker": 37640, "sker</w>": 33600, "sket": 3744, "sketch": 11767, "sketch</w>": 5269, "sketchbook</w>": 18899, "sketched</w>": 38581, "sketches</w>": 17622, "sketching</w>": 23228, "sketchy</w>": 41582, "skey</w>": 37453, "ski": 3327, "ski</w>": 3428, "skid": 36574, "skid</w>": 32099, "skier</w>": 42585, "skies</w>": 7244, "skiing</w>": 14400, "skil": 24543, "skill": 15598, "skill</w>": 10604, "skilled</w>": 17535, "skillet</w>": 
40568, "skills</w>": 4113, "skim": 33191, "skin": 5821, "skin</w>": 3575, "skincare</w>": 12648, "skine</w>": 37300, "sking": 46215, "skinned</w>": 42199, "skinner</w>": 30261, "skinny": 42729, "skinny</w>": 15457, "skins</w>": 11594, "skip": 39793, "skip</w>": 14296, "skipped</w>": 40639, "skipper</w>": 22226, "skipping</w>": 34867, "skir": 8919, "skirt</w>": 12386, "skirts</w>": 24840, "skis</w>": 32843, "skit</w>": 43573, "skitchen</w>": 42820, "skittles</w>": 43213, "sko": 15141, "sko</w>": 23493, "skoda</w>": 38668, "skool</w>": 26743, "skril": 43149, "skrillex</w>": 43651, "sks</w>": 48136, "sku": 10836, "skul": 17561, "skull": 34068, "skull</w>": 12092, "skulls</w>": 31804, "skunk</w>": 42194, "sky": 3075, "sky</w>": 2390, "skybet": 45540, "skye</w>": 21475, "skyl": 43554, "skylar</w>": 45411, "skyline</w>": 14606, "skymap</w>": 41734, "skynews</w>": 40977, "skype</w>": 17069, "skyrim</w>": 33693, "skysports": 39845, "skysports</w>": 46725, "skywalker</w>": 32936, "sl": 2621, "sl</w>": 7489, "sla": 2725, "sla</w>": 26707, "slab</w>": 24241, "slabs</w>": 42818, "slack": 37108, "slack</w>": 30142, "slade</w>": 33546, "slain</w>": 35972, "slalom</w>": 43540, "slam": 14891, "slam</w>": 10131, "slammed</w>": 29772, "slams</w>": 18907, "slan": 44663, "slan</w>": 47193, "sland</w>": 11294, "slang</w>": 33655, "slap": 48830, "slap</w>": 21751, "slapped</w>": 38861, "slaps</w>": 46796, "slash</w>": 19749, "slat": 38966, "slate</w>": 17919, "slated</w>": 36094, "slater</w>": 25968, "slaugh": 26782, "slaughter</w>": 19815, "slaughtered</w>": 46615, "slav</w>": 47292, "slava</w>": 41797, "slave</w>": 14029, "slavery</w>": 15754, "slaves</w>": 23833, "slaw</w>": 28178, "slay": 48319, "slay</w>": 19380, "slayed</w>": 44870, "slayer</w>": 21605, "slaying</w>": 27812, "slays</w>": 45648, "slc</w>": 21972, "sle": 1709, "sleague</w>": 23336, "sled</w>": 28438, "sledge</w>": 48750, "slee": 17642, "slee</w>": 38977, "sleek</w>": 23187, "sleep": 4656, "sleep</w>": 3840, "sleeper</w>": 28709, "sleeping</w>": 6982, "sleepless</w>": 39779, "sleepover</w>": 39415, "sleeps</w>": 16610, "sleepy": 32572, "sleepy</w>": 14497, "sleet</w>": 36948, "sleeve": 35270, "sleeve</w>": 10536, "sleeveless</w>": 38049, "sleeves</w>": 19691, "sleg</w>": 47650, "sleigh</w>": 30865, "slender</w>": 40331, "slept</w>": 20388, "sler</w>": 14066, "sley": 17198, "sley</w>": 6496, "sli": 1811, "sli</w>": 44824, "slic": 19692, "slice</w>": 13431, "sliced</w>": 28121, "slices</w>": 28424, "slick</w>": 18341, "slide": 27828, "slide</w>": 8837, "slider</w>": 37861, "sliders</w>": 40700, "slides</w>": 15939, "slideshow</w>": 42817, "sliding</w>": 21468, "slife</w>": 15448, "sliga</w>": 21080, "slight</w>": 14297, "slightly</w>": 8456, "sligo</w>": 30424, "slike": 38744, "slim": 35226, "slim</w>": 12364, "slime</w>": 29107, "sling": 28021, "sling</w>": 32607, "slinger</w>": 47269, "slions</w>": 43363, "slip": 39785, "slip</w>": 12105, "slipknot</w>": 41816, "slipped</w>": 30344, "slipper</w>": 39644, "slippers</w>": 26509, "slippery</w>": 30814, "slipping</w>": 36301, "slips</w>": 30632, "slist</w>": 33749, "slit</w>": 47011, "slive</w>": 31652, "slo": 4303, "slo</w>": 36083, "sloan</w>": 29110, "sloane</w>": 41553, "slogan</w>": 23398, "slogans</w>": 42795, "slope</w>": 22769, "slopes</w>": 24066, "sloppy</w>": 36154, "slot</w>": 14500, "sloth</w>": 30007, "slots</w>": 19238, "slou": 48493, "slovak": 23315, "slovakia</w>": 25994, "sloven": 17018, "slovenia</w>": 21037, "slow": 6674, "slow</w>": 5444, "slowdown</w>": 38421, "slowed</w>": 
43793, "slower</w>": 29181, "slowing</w>": 29839, "slowly</w>": 9568, "slows</w>": 46855, "slp</w>": 45599, "slr</w>": 21325, "sls</w>": 33651, "slt</w>": 39283, "sltd</w>": 36388, "slu": 7224, "slu</w>": 47456, "slug</w>": 34190, "slugger</w>": 48671, "slum</w>": 46754, "slumber</w>": 44295, "slump</w>": 35588, "slur": 30476, "slush</w>": 39815, "slv</w>": 45526, "sly": 28145, "sly</w>": 21062, "sm": 978, "sm</w>": 2764, "sma": 4357, "sma</w>": 11854, "smack": 21280, "smack</w>": 30026, "smackdown</w>": 26138, "smafia</w>": 47686, "smag</w>": 32212, "smal": 48379, "small": 5244, "small</w>": 2442, "smallbiz": 41724, "smallbiz</w>": 18987, "smallbusiness</w>": 21316, "smalle": 18490, "smaller</w>": 12431, "smallest</w>": 18686, "smalls</w>": 41696, "sman</w>": 9612, "smar": 3201, "smart": 5383, "smart</w>": 4115, "smartcities</w>": 34822, "smartcity</w>": 33973, "smarter</w>": 18990, "smartest</w>": 37092, "smarthome</w>": 47726, "smartphone</w>": 11290, "smartphones</w>": 22212, "smartwatch</w>": 35798, "smash": 17258, "smash</w>": 10332, "smashbros</w>": 44897, "smashed</w>": 18410, "smashes</w>": 45657, "smashing</w>": 19632, "smatter</w>": 16537, "smb</w>": 30446, "smc": 31375, "smc</w>": 28312, "smd</w>": 34582, "sme": 11758, "sme</w>": 15650, "smear</w>": 37546, "smel": 28476, "smell</w>": 9688, "smelling</w>": 32493, "smells</w>": 14668, "smelly</w>": 46145, "smen</w>": 15961, "smer": 48526, "smere</w>": 39629, "smes</w>": 26141, "smg</w>": 46876, "smh</w>": 9623, "smi": 5655, "smi</w>": 40049, "smil": 33937, "smile": 27641, "smile</w>": 3490, "smiled</w>": 34362, "smiles</w>": 8726, "smiley</w>": 22925, "smiling</w>": 9200, "smir": 24667, "smith": 10527, "smith</w>": 2915, "smiths</w>": 27872, "smithson": 25372, "smithsonian</w>": 31209, "smm</w>": 19510, "smma</w>": 42370, "smo": 2513, "smo</w>": 13437, "smobile</w>": 38923, "smog</w>": 44425, "smoke": 20381, "smoke</w>": 6664, "smoked</w>": 11161, "smoker</w>": 32348, "smokers</w>": 29571, "smokes</w>": 40336, "smokey</w>": 23670, "smokin</w>": 32825, "smoking</w>": 9038, "smoky</w>": 25549, "smol": 29939, "smol</w>": 40403, "smoo": 5430, "smooth": 10958, "smooth</w>": 8990, "smoother</w>": 44271, "smoothie</w>": 16668, "smoothies</w>": 34458, "smoothly</w>": 32380, "smore</w>": 48323, "smp</w>": 32260, "smriti": 49227, "sms</w>": 10409, "smt</w>": 26672, "smtown</w>": 26072, "smu": 10878, "smu</w>": 30458, "smug</w>": 41021, "smugg": 28130, "smuggling</w>": 34146, "smur": 24708, "smusic</w>": 19191, "smw</w>": 44929, "smx</w>": 46699, "smy": 14381, "smyth</w>": 44822, "sn": 1672, "sn</w>": 5844, "sna": 4032, "snack": 47548, "snack</w>": 10039, "snacking</w>": 46474, "snacks</w>": 12349, "snag": 34789, "snag</w>": 28043, "snagged</w>": 48534, "snail</w>": 23132, "snails</w>": 34928, "snake": 30133, "snake</w>": 8798, "snakes</w>": 19605, "snap": 4578, "snap</w>": 7404, "snapback</w>": 31234, "snapchat</w>": 7799, "snapmatic</w>": 45907, "snapp": 10185, "snapped</w>": 15543, "snapper</w>": 31677, "snapping</w>": 31581, "snaps</w>": 16890, "snapshot</w>": 18243, "snar": 30810, "snare</w>": 40651, "snat": 18457, "snatch</w>": 35302, "snatched</w>": 44821, "snation</w>": 14362, "snazzy</w>": 48963, "snc</w>": 39918, "sne": 3791, "sne</w>": 46503, "sneak": 27871, "sneak</w>": 6917, "sneaker": 31698, "sneaker</w>": 24781, "sneakers</w>": 17397, "sneaking</w>": 34633, "sneakpeek</w>": 47831, "sneaks</w>": 40926, "sneaky</w>": 21293, "snee": 42095, "snell</w>": 46410, "sner</w>": 31424, "snes</w>": 26667, "snews</w>": 18623, "snf</w>": 
47651, "sng</w>": 41549, "snhl</w>": 43093, "sni": 7186, "sni</w>": 35570, "snickers</w>": 49127, "sniff</w>": 37841, "snip</w>": 42954, "sniper</w>": 22157, "snippet</w>": 37531, "snippets</w>": 44001, "snl</w>": 16011, "sno": 8567, "sno</w>": 17802, "snoo": 11352, "snooker</w>": 25657, "snoop": 44503, "snoop</w>": 27754, "snoopdogg</w>": 48388, "snoopy</w>": 41967, "snooze</w>": 40718, "snor": 16590, "snoring</w>": 44560, "snorkel": 44285, "snorkeling</w>": 48103, "snow": 3880, "snow</w>": 2583, "snowball</w>": 39254, "snowboard</w>": 33403, "snowboarding</w>": 32397, "snowday</w>": 37982, "snowden</w>": 32154, "snowdon</w>": 47107, "snowdonia</w>": 36088, "snowed</w>": 45073, "snowfall</w>": 21714, "snowflake</w>": 33447, "snowflakes</w>": 38618, "snowing</w>": 21443, "snowman</w>": 22668, "snowstorm</w>": 38777, "snowy</w>": 14191, "snp</w>": 15301, "sns</w>": 36343, "snsd</w>": 27961, "snt</w>": 34834, "snu": 9694, "snuck</w>": 36522, "snug</w>": 45169, "snuggle</w>": 31327, "snuggles</w>": 48165, "sny": 17526, "snyder</w>": 22106, "snz</w>": 37678, "so": 759, "so</w>": 706, "soa</w>": 39584, "soak</w>": 24839, "soaked</w>": 26592, "soaking</w>": 26750, "soap": 26086, "soap</w>": 11088, "soaps</w>": 40958, "soar": 48997, "soar</w>": 22241, "soaring</w>": 27968, "soars</w>": 41348, "sob": 24900, "sob</w>": 35507, "sobbing</w>": 36691, "sober": 30969, "sober</w>": 24487, "sobre</w>": 42768, "sobri": 49308, "sobs</w>": 43636, "soc": 3253, "soc</w>": 7741, "soca</w>": 49239, "socal": 46470, "socal</w>": 20450, "soccer": 16268, "soccer</w>": 4233, "socceroos</w>": 41997, "socent</w>": 30831, "sochi</w>": 21014, "soci": 1720, "social": 4803, "social</w>": 2346, "socialism</w>": 23372, "socialist</w>": 18450, "socialists</w>": 43839, "socially</w>": 24555, "socialmedi": 23813, "socialmedia</w>": 9600, "socialmediamarketing</w>": 31790, "societal</w>": 40058, "societies</w>": 25855, "society</w>": 3757, "socio": 44319, "socio</w>": 42790, "sociology</w>": 32373, "sock": 29801, "sock</w>": 18277, "socket</w>": 28657, "socks</w>": 8774, "socorro</w>": 46409, "socute</w>": 45086, "sod</w>": 31435, "soda</w>": 13533, "sodium</w>": 29070, "soe": 44136, "soe</w>": 25498, "soever</w>": 34024, "sof": 1571, "sof</w>": 41187, "sofa</w>": 15723, "soff": 35290, "soff</w>": 30684, "sofficial</w>": 20563, "sofi": 41537, "sofia</w>": 18914, "sofinstagram</w>": 17301, "soft": 12778, "soft</w>": 3773, "softball</w>": 8369, "softer</w>": 44462, "softhe": 23127, "softly</w>": 34958, "software": 35941, "software</w>": 5847, "softwitter</w>": 11311, "sog</w>": 44775, "soggy</w>": 41168, "sohn</w>": 49267, "soho": 47749, "soho</w>": 17592, "soi</w>": 40495, "soil": 33417, "soil</w>": 9216, "soils</w>": 34891, "soir</w>": 43427, "sok</w>": 43456, "sol": 1175, "sol</w>": 9941, "sola</w>": 40086, "solace</w>": 42567, "solar": 16990, "solar</w>": 5199, "solareclipse</w>": 44727, "sold": 33116, "sold</w>": 3939, "soldi": 5098, "soldier</w>": 9355, "soldiers</w>": 7547, "sole": 10519, "sole</w>": 8576, "soleil</w>": 33148, "solely</w>": 27913, "solent</w>": 47783, "soles</w>": 22682, "soli": 3911, "solic": 19369, "solicitor</w>": 45647, "solicitors</w>": 46000, "solid": 30626, "solid</w>": 6148, "solidar": 10415, "solidarity</w>": 10983, "solidi": 46136, "solids</w>": 49070, "solihull</w>": 45293, "solit": 37039, "solitaire</w>": 47257, "solitary</w>": 33094, "solitude</w>": 33199, "solo": 17626, "solo</w>": 5797, "soloist</w>": 46391, "solom": 15768, "solomon</w>": 19785, "solos</w>": 44868, "solst": 20298, 
"solstice</w>": 21359, "solu": 2487, "solution</w>": 4575, "solutions</w>": 5140, "solve</w>": 8917, "solved</w>": 13451, "solves</w>": 42740, "solving</w>": 15581, "som": 734, "som</w>": 10672, "soma</w>": 36170, "somal": 40281, "somali</w>": 26231, "somalia</w>": 17051, "somaliland</w>": 43315, "some": 1132, "some</w>": 836, "somebody</w>": 8305, "someday</w>": 17127, "somehow</w>": 11735, "someone</w>": 2100, "somer": 9656, "somerhalder</w>": 33990, "somerset</w>": 14926, "somerville</w>": 41409, "somes</w>": 38124, "somethin</w>": 33541, "something": 28316, "something</w>": 2006, "sometime</w>": 21464, "sometimes</w>": 4237, "somewhat</w>": 17864, "somewhere</w>": 8119, "somm": 42726, "somme</w>": 30625, "sommer</w>": 44954, "somos</w>": 24951, "son": 1176, "son</w>": 825, "sona</w>": 21249, "sonam": 40096, "sonar</w>": 48235, "sonata</w>": 37009, "sone</w>": 29599, "song": 6868, "song</w>": 2295, "songs</w>": 4641, "songwriter</w>": 13034, "songwriters</w>": 39583, "songwriting</w>": 33567, "songz</w>": 49302, "soni": 34899, "soni</w>": 35911, "sonia</w>": 20409, "sonic": 23785, "sonic</w>": 9132, "sonics</w>": 48511, "sonja</w>": 46102, "sonline</w>": 23412, "sonny": 43000, "sonny</w>": 20880, "sono</w>": 44109, "sonom": 48596, "sonoma</w>": 26269, "sons</w>": 5502, "sonsof": 46676, "sont</w>": 31063, "sonthe": 40923, "sony": 16042, "sony</w>": 8748, "sonya</w>": 39172, "soo": 5517, "soo</w>": 8602, "soom": 39771, "soon": 27559, "soon</w>": 1745, "sooner</w>": 18968, "sooners</w>": 30449, "sooo</w>": 11526, "soooo</w>": 13658, "sooooo</w>": 21199, "soooooo</w>": 34859, "soor": 46698, "soothe</w>": 44424, "soothing</w>": 27730, "sop": 3974, "sop</w>": 19194, "soph</w>": 34963, "sophi": 6192, "sophia</w>": 16790, "sophie": 38648, "sophie</w>": 12357, "sophistic": 17646, "sophisticated</w>": 20833, "sophom": 13696, "sophomore</w>": 15242, "sophomores</w>": 47645, "soprano</w>": 28880, "soproud</w>": 44479, "sor": 1852, "sor</w>": 16872, "sora</w>": 38719, "sorbet</w>": 39994, "sore": 43330, "sore</w>": 15454, "sored</w>": 6731, "soren": 38907, "sorg</w>": 28152, "sori": 38588, "sorority</w>": 30059, "soros</w>": 33248, "sorren": 44012, "sorrow</w>": 28020, "sorrows</w>": 47924, "sorry": 25745, "sorry</w>": 3675, "sorrynotsorry</w>": 37105, "sort</w>": 8450, "sorta</w>": 34700, "sorted</w>": 13221, "sorting</w>": 19198, "sorts</w>": 12577, "sory</w>": 16257, "sos": 25145, "sos</w>": 5792, "sosa</w>": 45433, "sosfam</w>": 47709, "sot": 41542, "sot</w>": 34116, "sothe": 32145, "sotho</w>": 45496, "soto</w>": 27947, "sotto</w>": 26047, "sotu</w>": 32286, "sou": 1101, "sou</w>": 24293, "sought</w>": 18874, "soul": 8701, "soul</w>": 3755, "soulful</w>": 30196, "soulmate</w>": 38130, "souls</w>": 10951, "soun": 19474, "sound": 5236, "sound</w>": 3608, "soundcheck</w>": 31394, "soundcloud</w>": 15190, "sounded</w>": 28287, "sounders</w>": 44933, "sounding</w>": 21351, "sounds</w>": 5694, "soundtrack</w>": 11389, "soup</w>": 7077, "soups</w>": 45052, "sour": 2235, "sour</w>": 12049, "source": 23698, "source</w>": 3634, "sourced</w>": 23340, "sources</w>": 5124, "sourcing</w>": 19574, "sourdough</w>": 29921, "souri</w>": 11674, "sous</w>": 32093, "sousa</w>": 46296, "sout": 38156, "sout</w>": 32732, "south": 2938, "south</w>": 2045, "southafrica</w>": 15184, "southampton</w>": 15767, "southbank</w>": 44173, "southbound</w>": 22932, "southeast</w>": 13942, "southeastern</w>": 26813, "southend</w>": 25583, "souther": 33330, "southern": 17704, "southern</w>": 5036, "southgate</w>": 47262, 
"southkorea</w>": 43552, "southport</w>": 37446, "southside</w>": 36436, "southsudan</w>": 30419, "southwark</w>": 39098, "southwe": 46443, "southwest</w>": 13320, "southwestern</w>": 30157, "souven": 20210, "souvenir</w>": 24811, "souvenirs</w>": 48460, "souza</w>": 29424, "sov</w>": 29737, "sover</w>": 31876, "sovere": 17736, "sovereign": 29418, "sovereign</w>": 26337, "sovereignty</w>": 31701, "soviet</w>": 14274, "sow</w>": 33089, "sowe": 36130, "soweto</w>": 47070, "sown</w>": 49369, "sox": 39556, "sox</w>": 8657, "soy": 16524, "soy</w>": 15010, "soybean</w>": 34606, "soybeans</w>": 40840, "soyu": 39578, "soyuz</w>": 43842, "sp": 588, "sp</w>": 4393, "spa": 7852, "spa</w>": 6692, "spac": 10336, "space": 7857, "space</w>": 2138, "spacecraft</w>": 25940, "spaces</w>": 9006, "spaceship</w>": 34317, "spacex</w>": 22511, "spacey</w>": 48770, "spacious</w>": 24769, "spad": 45362, "spade</w>": 32562, "spades</w>": 48368, "spaghetti</w>": 18440, "spain</w>": 5083, "spal": 26018, "spam": 29712, "spam</w>": 14624, "span": 4270, "span</w>": 14537, "spandex</w>": 41686, "spani": 16721, "spaniel</w>": 35435, "spanish": 29966, "spanish</w>": 6013, "spann</w>": 25323, "spanning</w>": 38638, "spans</w>": 45407, "spaper": 34548, "spar": 3378, "spar</w>": 34576, "spare</w>": 12615, "spares</w>": 39505, "spark": 9555, "spark</w>": 11047, "sparked</w>": 32647, "sparkle</w>": 18287, "sparkles</w>": 36410, "sparkling</w>": 17893, "sparkly</w>": 30542, "sparks</w>": 15046, "sparky</w>": 47198, "sparring</w>": 42161, "sparrow</w>": 22888, "spart": 10143, "sparta</w>": 38401, "spartan": 26582, "spartan</w>": 24225, "spartans</w>": 20457, "sparty</w>": 36477, "spas": 31714, "spati": 19200, "spatial</w>": 22022, "spaw": 31605, "spawn</w>": 29166, "spay": 40634, "spc</w>": 20492, "spca</w>": 37018, "spd": 37717, "spd</w>": 28307, "spdwy</w>": 45981, "spe": 876, "spe</w>": 36676, "speak": 20599, "speak</w>": 4208, "speake": 46077, "speaker": 25764, "speaker</w>": 4914, "speakers</w>": 7675, "speaking</w>": 3714, "speaks</w>": 5661, "spear": 23277, "spear</w>": 30420, "speare</w>": 43859, "spears</w>": 20242, "spec": 1711, "spec</w>": 18596, "speci": 1969, "special": 11422, "special</w>": 1689, "specialist</w>": 10630, "specialists</w>": 21719, "speciality</w>": 46904, "specialized</w>": 23265, "specializes</w>": 48533, "specially</w>": 4513, "specials</w>": 11983, "specialty</w>": 18262, "species</w>": 6330, "specific</w>": 10528, "specifically</w>": 17174, "specification</w>": 46394, "specifications</w>": 39705, "specified</w>": 48114, "specimen</w>": 30263, "specimens</w>": 42715, "specs</w>": 24093, "spect</w>": 3416, "spectac": 7242, "spectacle</w>": 34342, "spectacular</w>": 8404, "spectator</w>": 32372, "spectators</w>": 39306, "spective</w>": 6633, "spector</w>": 48676, "spectral</w>": 45441, "spectre</w>": 35998, "spectro": 27646, "spectrum</w>": 13532, "specul": 19209, "speculation</w>": 30898, "sped</w>": 38813, "spee": 4050, "speech": 19556, "speech</w>": 4902, "speeches</w>": 25208, "speechless</w>": 23152, "speed": 6860, "speed</w>": 4163, "speeding</w>": 27264, "speeds</w>": 22017, "speedway</w>": 11480, "speedy</w>": 21603, "spel": 41887, "spell": 22784, "spell</w>": 11230, "spelled</w>": 24339, "spelling</w>": 15614, "spells</w>": 25335, "spelt</w>": 38316, "spen": 5087, "spence</w>": 33324, "spencer": 27509, "spencer</w>": 10678, "spend</w>": 4664, "spending</w>": 5961, "spends</w>": 22508, "spent</w>": 4429, "speople</w>": 33035, "sper": 8213, "sper</w>": 15313, "sperm</w>": 35781, 
"sperson</w>": 22687, "spf</w>": 34973, "spg</w>": 34623, "sph": 28909, "sph</w>": 24684, "sphe": 33691, "spher": 18349, "sphere</w>": 6987, "spheres</w>": 37478, "spheric</w>": 21744, "sphin": 39237, "sphinx</w>": 46487, "spho": 20442, "sphoto</w>": 38594, "sphy": 43808, "spi": 3174, "spi</w>": 37080, "spic": 17264, "spice": 29761, "spice</w>": 10141, "spiced</w>": 24267, "spicer</w>": 37627, "spices</w>": 21194, "spicy</w>": 10915, "spide": 36801, "spider": 11963, "spider</w>": 7622, "spiderman": 39808, "spiderman</w>": 18427, "spiders</w>": 23141, "spidey</w>": 41706, "spie": 28573, "spie</w>": 28746, "spied</w>": 43998, "spiegel</w>": 45351, "spiel": 28435, "spiel</w>": 37690, "spielberg</w>": 37569, "spies</w>": 25374, "spieth</w>": 43254, "spike": 35306, "spike</w>": 15310, "spiked</w>": 47014, "spikes</w>": 29582, "spil": 47765, "spill": 43933, "spill</w>": 18006, "spilled</w>": 33206, "spilling</w>": 49006, "spills</w>": 35796, "spin": 6288, "spin</w>": 9226, "spinach</w>": 14747, "spinal</w>": 23925, "spine": 48221, "spine</w>": 19646, "sping</w>": 47113, "spinner</w>": 29924, "spinning</w>": 13987, "spino": 40848, "spinoff</w>": 42513, "spinrilla</w>": 46064, "spins</w>": 27243, "spion": 39604, "spionage</w>": 41838, "spir": 3745, "spiral</w>": 19873, "spiration</w>": 38126, "spire</w>": 27439, "spired</w>": 40650, "spires</w>": 46938, "spiri": 4024, "spirit": 18224, "spirit</w>": 4071, "spirited</w>": 34701, "spirits</w>": 13192, "spiritu": 7237, "spiritual": 46076, "spiritual</w>": 9473, "spirituality</w>": 22165, "spiro": 40085, "spit": 18115, "spit</w>": 23177, "spite</w>": 26060, "spitfire</w>": 31126, "spitting</w>": 40721, "spl": 2470, "spl</w>": 33052, "spla": 4809, "splac": 16059, "splace</w>": 38743, "splash": 43641, "splash</w>": 11879, "splat": 15733, "splatoon</w>": 22565, "splay</w>": 3169, "splen": 18552, "splend": 29861, "splendid</w>": 21016, "splendor</w>": 46262, "splin": 38090, "split": 25443, "split</w>": 9109, "splits</w>": 34897, "splitting</w>": 37210, "splus</w>": 40866, "spn": 35467, "spn</w>": 19414, "spnfamily</w>": 38566, "spo": 1261, "spo</w>": 21085, "spock</w>": 43918, "spoil</w>": 25600, "spoiled</w>": 21399, "spoiler</w>": 16512, "spoilers</w>": 18326, "spoils</w>": 42436, "spoilt</w>": 35358, "spokane</w>": 24528, "spoke": 13890, "spoke</w>": 6518, "spoken</w>": 12979, "spokesman</w>": 31632, "spokesperson</w>": 26234, "spol": 22476, "spol</w>": 8132, "spoli</w>": 34301, "spolice</w>": 37406, "spon": 1715, "spon</w>": 48216, "sponge": 22861, "sponge</w>": 24345, "spongebob</w>": 25089, "spons": 5597, "sponsor": 10424, "sponsor</w>": 7574, "sponsored</w>": 7197, "sponsoring</w>": 16181, "sponsors</w>": 11005, "sponsorship</w>": 17632, "spontaneous</w>": 32465, "spoo": 11248, "spooky</w>": 15369, "spool</w>": 49152, "spoon": 27001, "spoon</w>": 14024, "spoons</w>": 29661, "spor": 1475, "spor</w>": 33746, "sport": 4379, "sport</w>": 2364, "sporting": 32620, "sporting</w>": 8944, "sports": 6436, "sports</w>": 2054, "sportsc": 40114, "sportscar</w>": 46931, "sportscenter</w>": 39157, "sportsman</w>": 39020, "sportsmanship</w>": 34858, "sportsnet</w>": 34144, "sportswear</w>": 39747, "sporty</w>": 33346, "spot": 3223, "spot</w>": 3049, "spotify</w>": 7193, "spotlight</w>": 7901, "spots</w>": 7670, "spotted</w>": 4533, "spotter</w>": 30742, "spotting</w>": 15885, "spouse</w>": 24724, "spout</w>": 48993, "spp</w>": 47567, "spr": 1536, "spr</w>": 19417, "spra": 12966, "spraw": 46590, "spray": 37885, "spray</w>": 10449, "sprayed</w>": 40022, 
"spraying</w>": 39224, "spre": 18740, "spread": 20620, "spread</w>": 5284, "spreading</w>": 11821, "spreads</w>": 27579, "spree</w>": 21851, "spri": 35498, "spride</w>": 26685, "spring": 5166, "spring</w>": 2420, "springbreak</w>": 37753, "springer</w>": 30117, "springfield</w>": 16599, "springs</w>": 7308, "springst</w>": 32132, "springsteen</w>": 28367, "springtime</w>": 28285, "springtraining</w>": 49364, "springwatch</w>": 29239, "sprink": 15817, "sprinkle</w>": 42897, "sprinkler</w>": 48754, "sprinkles</w>": 37326, "sprint": 29248, "sprint</w>": 10751, "sprinter</w>": 36947, "sprints</w>": 36404, "sprite</w>": 32544, "spro": 13902, "spro</w>": 37403, "sproject</w>": 37802, "sproud</w>": 37686, "sprout</w>": 35863, "sprouts</w>": 25756, "spru": 17041, "spruce</w>": 23812, "sprung</w>": 32968, "sps</w>": 13869, "spu": 23566, "spun": 47922, "spun</w>": 32852, "spur": 15206, "spur</w>": 20361, "spurs</w>": 10916, "spursofficial</w>": 45290, "sput": 47521, "spx</w>": 20584, "spy": 13861, "spy</w>": 6656, "spyder</w>": 39952, "spying</w>": 36227, "sq": 9370, "sq</w>": 11590, "sqft</w>": 41912, "sql": 42759, "sql</w>": 18938, "sqm</w>": 47978, "sqn</w>": 41209, "squ": 1653, "squad": 13892, "squad</w>": 4234, "squadron</w>": 18579, "squads</w>": 36590, "square": 19314, "square</w>": 3999, "squared</w>": 32967, "squares</w>": 26972, "squash</w>": 13312, "squat": 44628, "squat</w>": 30680, "squats</w>": 40213, "sque": 9721, "sque</w>": 8097, "squee": 14420, "squeeze</w>": 21684, "squeezed</w>": 40413, "squid": 42057, "squid</w>": 22553, "squir": 9683, "squire</w>": 48090, "squirrel</w>": 14004, "squirrels</w>": 26623, "squish": 42607, "squishy</w>": 47001, "sr": 3437, "sr</w>": 5428, "srbachchan</w>": 32353, "src</w>": 23445, "sre": 17748, "sri": 11051, "sri</w>": 9276, "sridevi</w>": 46301, "srilan": 15559, "srilanka</w>": 16922, "srin": 26818, "srinagar</w>": 33671, "srini": 41899, "sriracha</w>": 42743, "sris": 27851, "srisri</w>": 32966, "srk": 44982, "srk</w>": 11216, "srl</w>": 33808, "srp</w>": 43004, "srs</w>": 41764, "srsly</w>": 44179, "srt</w>": 28139, "sru": 44152, "srugby</w>": 40526, "ss": 690, "ss</w>": 632, "ssa</w>": 6088, "ssal": 31330, "ssal</w>": 35936, "ssb</w>": 37511, "ssc": 21692, "ssc</w>": 20364, "ssd</w>": 23107, "sse": 9030, "sse</w>": 8938, "ssed": 38755, "ssed</w>": 1804, "ssel": 17402, "ssel</w>": 19373, "sseldorf</w>": 47792, "ssell</w>": 42388, "ssels</w>": 8355, "ssen": 39408, "ssen</w>": 22645, "sser</w>": 20445, "sses</w>": 1802, "ssett</w>": 44103, "ssf</w>": 33239, "ssg</w>": 40707, "ssh</w>": 48866, "ssi": 834, "ssi</w>": 14953, "ssia</w>": 22238, "ssian</w>": 31218, "ssible</w>": 47099, "ssic": 27774, "ssic</w>": 17077, "ssie</w>": 7572, "ssier</w>": 26422, "ssil</w>": 15026, "ssin</w>": 42660, "ssing</w>": 2112, "ssion": 16050, "ssion</w>": 1627, "ssional</w>": 13727, "ssionism</w>": 24787, "ssionist</w>": 27682, "ssions</w>": 4137, "ssive</w>": 2734, "ssively</w>": 28060, "ssl</w>": 32195, "ssler</w>": 30287, "ssly</w>": 24904, "ssn</w>": 39116, "ssnhq</w>": 47998, "sso": 25900, "sso</w>": 7914, "ssoccer</w>": 32546, "sson": 36124, "sson</w>": 7271, "ssor</w>": 35152, "ssp</w>": 31101, "ssr</w>": 39880, "sss</w>": 11176, "ssss": 30676, "ssss</w>": 15880, "sssss</w>": 24298, "sst</w>": 40396, "ssu</w>": 35351, "ssummit</w>": 49301, "ssus</w>": 31286, "ssw</w>": 36937, "ssy": 22519, "ssy</w>": 8661, "st": 522, "st</w>": 545, "sta": 1363, "sta</w>": 2745, "stab": 7726, "stab</w>": 29974, "stabbed</w>": 24534, "stabbing</w>": 25474, "stabil": 42576, 
"stabili": 23903, "stability</w>": 16716, "stable": 44427, "stable</w>": 10492, "stables</w>": 34218, "stac": 10175, "stacey": 41653, "stacey</w>": 24262, "stache</w>": 23616, "stack": 24723, "stack</w>": 11257, "stacked</w>": 24990, "stacking</w>": 39836, "stacks</w>": 24734, "stacy</w>": 26628, "stad": 15832, "stad</w>": 16485, "stade</w>": 38198, "stadi": 26587, "stadion</w>": 48815, "stadium</w>": 3390, "stadiums</w>": 38852, "stadt</w>": 22713, "staf": 2367, "staff": 31188, "staff</w>": 2813, "staffer</w>": 38494, "staffers</w>": 44994, "staffing</w>": 32932, "stafford</w>": 25006, "staffordshire</w>": 29198, "staffs</w>": 36098, "stag": 12088, "stag</w>": 20277, "stage": 23182, "stage</w>": 2170, "staged</w>": 19906, "stages</w>": 12297, "staggering</w>": 37315, "staging</w>": 27026, "stagram</w>": 19503, "stags</w>": 45936, "stain": 3933, "stain</w>": 14603, "stained</w>": 13751, "staining</w>": 32523, "stainless</w>": 12320, "stains</w>": 32008, "stair": 7240, "stair</w>": 17662, "staircase</w>": 22777, "stairs</w>": 9577, "stairway</w>": 45559, "stak": 39144, "stake": 15955, "stake</w>": 7937, "stakeholder</w>": 39122, "stakeholders</w>": 22968, "stakes</w>": 7519, "staking</w>": 47082, "stal": 3861, "stal</w>": 5535, "stale</w>": 42471, "stalert</w>": 25450, "stalin</w>": 28346, "stalk": 40826, "stalk</w>": 14878, "stalker</w>": 26777, "stalking</w>": 24721, "stalks</w>": 45886, "stall": 24636, "stall</w>": 12058, "stalled</w>": 40362, "stallion</w>": 28273, "stallions</w>": 44787, "stallone</w>": 40969, "stalls</w>": 25427, "stam": 4663, "stamatic</w>": 30904, "stamford</w>": 27843, "stamina</w>": 48753, "stamp": 28694, "stamp</w>": 12771, "stampcollecting</w>": 42852, "stamped</w>": 38356, "stampede</w>": 25384, "stamps</w>": 13827, "stan": 2203, "stan</w>": 2434, "stana</w>": 33311, "stanbul</w>": 11231, "stance": 48900, "stance</w>": 3542, "stances</w>": 15054, "stand": 1819, "stand</w>": 2087, "standalone</w>": 44887, "standard": 35780, "standard</w>": 5807, "standardi": 30247, "standards</w>": 9022, "standby</w>": 36184, "standing": 39934, "standing</w>": 2862, "standings</w>": 19835, "standoff</w>": 31821, "standout</w>": 23131, "standre": 48309, "stands</w>": 6446, "standup": 35108, "standup</w>": 24964, "standwith": 19540, "stanford": 36219, "stanford</w>": 15087, "stang</w>": 12536, "stani</w>": 38228, "stanis": 37711, "stanley": 19048, "stanley</w>": 10079, "stanleycup</w>": 28662, "stans</w>": 26564, "stant": 41576, "stant</w>": 4906, "stanton</w>": 25400, "stap": 10438, "staple</w>": 22695, "staples</w>": 23646, "stapleton</w>": 45228, "star": 993, "star</w>": 1565, "starbuck": 48519, "starbucks</w>": 9499, "starch</w>": 47837, "starcraft</w>": 48871, "stardom</w>": 44616, "stardust</w>": 34337, "stare</w>": 18094, "stared</w>": 47772, "stares</w>": 37916, "starfish</w>": 44283, "stargate</w>": 41099, "stargazing</w>": 49328, "staring</w>": 13800, "stark": 40446, "stark</w>": 15353, "starlight</w>": 32197, "starling</w>": 46205, "starmagic": 48023, "starplus</w>": 37815, "starr</w>": 19186, "starred</w>": 24180, "starrer</w>": 41311, "starring</w>": 6660, "starry</w>": 30963, "stars</w>": 2895, "starship</w>": 37166, "start": 17466, "start</w>": 1572, "started</w>": 2760, "starter</w>": 7800, "starters</w>": 22222, "starting</w>": 2530, "startrek": 30642, "startrek</w>": 15349, "starts</w>": 3105, "startu": 6996, "startup": 18049, "startup</w>": 5882, "startups</w>": 9056, "starve</w>": 46957, "starving</w>": 30473, "starwar": 17287, "starwars": 26239, 
"starwars</w>": 7887, "starz</w>": 25928, "stas</w>": 19866, "stash</w>": 27711, "stasy</w>": 45942, "stat": 3004, "stat</w>": 15216, "state": 3492, "state</w>": 1295, "statec": 33931, "stated</w>": 19629, "statedept</w>": 41458, "statefair</w>": 40305, "statement</w>": 5401, "statements</w>": 19513, "staten</w>": 38263, "stateof": 35195, "states": 22125, "states</w>": 4218, "statesman</w>": 35301, "stateu</w>": 44248, "statewide</w>": 29561, "stati": 9622, "static</w>": 16363, "stating</w>": 35147, "station": 13498, "station</w>": 2631, "stationary</w>": 29493, "stationed</w>": 47618, "stationery</w>": 33851, "stations</w>": 10051, "statistical</w>": 29349, "statistics</w>": 14165, "stats</w>": 7294, "statu": 32481, "statue</w>": 8222, "statues</w>": 24363, "status</w>": 6414, "stau": 28550, "staur": 3709, "stav": 20285, "stax</w>": 32235, "stay": 4714, "stay</w>": 2277, "stayed</w>": 13805, "staying</w>": 8993, "stays</w>": 13311, "staytuned</w>": 39285, "stc</w>": 29859, "std</w>": 30477, "ste": 795, "ste</w>": 2686, "stea</w>": 46614, "stead": 16101, "stead</w>": 11031, "steadily</w>": 35049, "steady</w>": 12937, "steak": 26955, "steak</w>": 8913, "steakhouse</w>": 35031, "steaks</w>": 30655, "steal": 37070, "steal</w>": 10181, "stealing</w>": 14242, "steals</w>": 20224, "stealth</w>": 25327, "steam": 10962, "steam</w>": 6972, "steamboat</w>": 41121, "steamed</w>": 29007, "steamer</w>": 49075, "steaming</w>": 43746, "steampunk</w>": 24130, "steamy</w>": 43104, "stec</w>": 46713, "stech": 48949, "stech</w>": 32455, "sted": 20426, "sted</w>": 1356, "stee": 31793, "steed</w>": 48293, "steel": 6938, "steel</w>": 4726, "steele</w>": 19460, "steelers</w>": 14430, "steen": 42851, "steen</w>": 18625, "steep": 28648, "steep</w>": 20714, "steer</w>": 27612, "steering</w>": 19833, "stef": 29158, "stefan": 15004, "stefan</w>": 18829, "stefani</w>": 38319, "stefano</w>": 30719, "steff": 30075, "stein": 13653, "stein</w>": 5818, "steiner</w>": 36314, "stel": 9102, "stel</w>": 10798, "stell": 22355, "stella": 46178, "stella</w>": 17869, "stellar</w>": 13810, "stellen": 42754, "stem": 24342, "stem</w>": 6761, "stemc": 40486, "stems</w>": 31503, "sten": 7652, "sten</w>": 7877, "stencil</w>": 47854, "stennis</w>": 45636, "step": 15572, "step</w>": 3348, "steph": 3522, "steph</w>": 16251, "stephan</w>": 37312, "stephani": 48121, "stephanie</w>": 14361, "stephen": 10421, "stephen</w>": 6078, "stephenking</w>": 46361, "stephens</w>": 22256, "stephenson</w>": 37280, "stepped</w>": 18384, "stepping</w>": 15906, "steps</w>": 5408, "ster": 1022, "ster</w>": 881, "stere": 9229, "stered</w>": 6935, "stereo": 15992, "stereo</w>": 17400, "stereotypes</w>": 27890, "steria</w>": 38804, "stering</w>": 14175, "sterling": 45790, "sterling</w>": 9378, "stern": 36254, "stern</w>": 2945, "steroids</w>": 37670, "sterone</w>": 39418, "sters</w>": 2132, "stery</w>": 24232, "stest</w>": 8556, "stev": 11640, "steve": 7412, "steve</w>": 3803, "steven": 10973, "steven</w>": 8016, "stevens</w>": 13877, "stevenson</w>": 25091, "stevie": 42104, "stevie</w>": 18969, "stew</w>": 17906, "stewar": 28453, "steward": 34980, "steward</w>": 43355, "stewards</w>": 49294, "stewardship</w>": 36720, "stewart</w>": 8120, "stfu</w>": 47000, "stg</w>": 48387, "stgeorge": 43698, "sth": 13456, "sth</w>": 34004, "sthe": 16491, "sthel": 42863, "sti": 860, "sti</w>": 12439, "stia</w>": 26492, "stible</w>": 25835, "stic": 5868, "stic</w>": 1561, "stical</w>": 16660, "stically</w>": 19041, "stick": 5483, "stick</w>": 4987, "sticker</w>": 11270, 
"stickers</w>": 11613, "sticking</w>": 21021, "sticks</w>": 10016, "sticky</w>": 18887, "stics</w>": 5449, "stie": 38164, "stie</w>": 11000, "stier</w>": 42069, "sties</w>": 16428, "stiff": 43471, "stiff</w>": 21441, "stig": 4088, "stig</w>": 42551, "stigate</w>": 15390, "stigma</w>": 20619, "stik</w>": 42247, "stil": 21790, "stil</w>": 37519, "stiles</w>": 33028, "still": 13209, "still</w>": 1170, "stills</w>": 20259, "stim": 18269, "stime</w>": 24711, "stimul": 16434, "stimulate</w>": 42380, "stimulating</w>": 41237, "stimulation</w>": 39530, "stimulus</w>": 47283, "stin": 2588, "stin</w>": 4025, "stina</w>": 22359, "stine</w>": 7098, "sting": 19868, "sting</w>": 1271, "stingly</w>": 49332, "stingray</w>": 43229, "stink</w>": 38213, "stinky</w>": 44957, "stino</w>": 40658, "stint</w>": 33531, "stion</w>": 10812, "stip": 39869, "stips</w>": 44756, "stique</w>": 43305, "stir": 12416, "stir</w>": 19564, "stirling</w>": 23128, "stirring</w>": 39205, "stis</w>": 45224, "stit": 14110, "stitch": 30003, "stitch</w>": 14771, "stitched</w>": 36540, "stitcher</w>": 48204, "stitches</w>": 32360, "stitching</w>": 45208, "stitu": 14585, "stitutes</w>": 40479, "stive</w>": 22426, "stix</w>": 48829, "stjohn": 36153, "stl": 14179, "stl</w>": 12527, "stlblues</w>": 44138, "stlcards</w>": 28644, "stle</w>": 7698, "stles</w>": 48638, "stlouis": 40358, "stlouis</w>": 39516, "stm</w>": 28333, "stn</w>": 27175, "sto": 928, "sto</w>": 5723, "stock": 5899, "stock</w>": 3206, "stocked</w>": 23552, "stockholm</w>": 16024, "stocki": 42944, "stocking</w>": 17335, "stockings</w>": 28040, "stockmarket</w>": 40359, "stockport</w>": 35569, "stocks</w>": 9321, "stockton</w>": 26130, "stoday</w>": 22392, "stok</w>": 43782, "stoke": 31338, "stoke</w>": 13550, "stoked</w>": 13160, "stokes</w>": 27512, "stol": 11401, "stol</w>": 6700, "stole</w>": 10995, "stolen</w>": 8704, "stolic</w>": 45020, "stom": 2343, "stom</w>": 38068, "stoma</w>": 43545, "stomach</w>": 14722, "stomp</w>": 40165, "stomping</w>": 46144, "ston": 4101, "ston</w>": 1839, "stone": 7694, "stone</w>": 2441, "stoned</w>": 36248, "stonehenge</w>": 42417, "stoner": 35131, "stoner</w>": 29115, "stones": 42659, "stones</w>": 6885, "stonewall</w>": 39688, "stoney</w>": 44198, "stony": 41717, "stony</w>": 35691, "stoo": 24505, "stood</w>": 9151, "stool": 34413, "stool</w>": 22314, "stop": 6005, "stop</w>": 1691, "stopbrexit</w>": 48680, "stopp": 15738, "stopped</w>": 6015, "stopper</w>": 32147, "stoppers</w>": 34457, "stopping</w>": 10735, "stops</w>": 9822, "stopthe": 26463, "stor": 809, "stor</w>": 17740, "storage</w>": 6824, "store": 17769, "store</w>": 2183, "stored</w>": 28257, "stores</w>": 6370, "storey</w>": 24025, "storians</w>": 34628, "stories</w>": 3784, "storing</w>": 40087, "stork</w>": 46452, "storm": 7434, "storm</w>": 2819, "stormed</w>": 45939, "stormhour</w>": 12161, "storming</w>": 24842, "storms</w>": 6464, "stormtrooper</w>": 49218, "stormy</w>": 20075, "stors</w>": 7178, "story": 6512, "story</w>": 1134, "storyline</w>": 37079, "storymonth</w>": 23717, "storyteller</w>": 35882, "storytelling</w>": 14457, "storytime</w>": 44197, "stos</w>": 19281, "stou": 37168, "stour": 37361, "stour</w>": 21928, "stout</w>": 16550, "stove</w>": 21423, "stow": 44284, "stow</w>": 17046, "stowe</w>": 34196, "stown": 28071, "stown</w>": 7939, "stp</w>": 30576, "stpatrick": 21343, "stpatricksday</w>": 22747, "str": 807, "str</w>": 15913, "stra": 1894, "stra</w>": 6253, "strack</w>": 46861, "strada</w>": 31134, "strade</w>": 48968, "straigh": 31016, "straight": 
22114, "straight</w>": 4241, "strain</w>": 16887, "strains</w>": 38067, "strait</w>": 22946, "straits</w>": 41984, "stral</w>": 23289, "stralia</w>": 42510, "stran": 18411, "strand": 18214, "strand</w>": 17826, "stranded</w>": 22975, "strang": 11138, "strange": 33380, "strange</w>": 7288, "strangely</w>": 37566, "stranger": 35541, "stranger</w>": 14149, "strangers</w>": 20684, "strangerthings</w>": 43271, "strangest</w>": 46740, "strap</w>": 13946, "strapped</w>": 40922, "straps</w>": 31213, "stras": 36814, "stras</w>": 42125, "strasbourg</w>": 39576, "strat": 11345, "strat</w>": 32925, "strata</w>": 47278, "strate": 3532, "strate</w>": 28758, "strategi": 49102, "strategic</w>": 10246, "strategically</w>": 45706, "strategies</w>": 9942, "strategist</w>": 37180, "strategy</w>": 5637, "strates</w>": 45724, "stratford</w>": 23955, "strath": 21997, "stration</w>": 3156, "strato": 28878, "strauss</w>": 32033, "strava</w>": 34625, "stravel</w>": 43494, "straw": 7430, "straw</w>": 16438, "strawberries</w>": 17796, "strawberry</w>": 10233, "straws</w>": 33048, "stray": 30784, "stray</w>": 15712, "stre": 1079, "stre</w>": 19652, "stread</w>": 27797, "streak</w>": 11749, "streaks</w>": 42092, "stream": 8659, "stream</w>": 3322, "streamed</w>": 26280, "streamer</w>": 25178, "streamers</w>": 19937, "streaming</w>": 6278, "streamline</w>": 44917, "streams</w>": 13545, "stree": 35082, "stree</w>": 32438, "streep</w>": 38701, "street": 4839, "street</w>": 2012, "streetart</w>": 12948, "streetcar</w>": 34268, "streetfood</w>": 44486, "streetphotography</w>": 20786, "streets</w>": 6058, "streetstyle</w>": 39118, "streetwear</w>": 37298, "strel</w>": 39685, "stren": 4349, "streng": 4472, "strength": 15475, "strength</w>": 5959, "strengthen</w>": 16318, "strengthened</w>": 47131, "strengthening</w>": 23475, "strengthens</w>": 40280, "strengths</w>": 29268, "stress": 17297, "stress</w>": 5843, "stressed</w>": 16497, "stresses</w>": 32112, "stressful</w>": 24268, "stressing</w>": 35917, "stret": 12265, "stretch</w>": 10064, "stretched</w>": 29393, "stretches</w>": 32231, "stretching</w>": 24423, "stri": 1493, "stri</w>": 27795, "stria</w>": 39620, "strial</w>": 30217, "strian</w>": 12924, "stric": 2607, "strick": 25181, "strickland</w>": 48939, "strict</w>": 21585, "strictly</w>": 16475, "stride</w>": 36024, "strides</w>": 37355, "stries</w>": 18171, "strife</w>": 46473, "strike": 20774, "strike</w>": 5767, "striker</w>": 12448, "strikers</w>": 33465, "strikes</w>": 9280, "striking</w>": 13392, "string": 25512, "string</w>": 9696, "strings</w>": 15699, "strip</w>": 9317, "stripe</w>": 19368, "striped</w>": 22192, "stripes</w>": 14239, "stripped</w>": 26602, "stripper</w>": 45759, "stripping</w>": 48588, "strips</w>": 19000, "strive</w>": 22140, "striving</w>": 37671, "stro": 3121, "stro</w>": 6186, "stroke": 44621, "stroke</w>": 10403, "strokes</w>": 26595, "strol": 30123, "stroll</w>": 15924, "stroller</w>": 47076, "strolling</w>": 40911, "strom</w>": 14707, "stron": 4165, "strong": 10436, "strong</w>": 2389, "stronger": 27760, "stronger</w>": 9245, "strongertogether</w>": 38532, "strongest</w>": 16171, "strongh": 38678, "strongly</w>": 15507, "strophy</w>": 47912, "strou": 48425, "stroud</w>": 39895, "strous</w>": 23752, "stru": 1666, "struc": 3311, "struck</w>": 10861, "struction</w>": 12497, "structural</w>": 16899, "structure</w>": 5285, "structured</w>": 27147, "structures</w>": 14171, "structuring</w>": 37496, "strugg": 5176, "struggle</w>": 8443, "struggled</w>": 32921, "struggles</w>": 17446, 
"struggling</w>": 12135, "struly": 34118, "strum</w>": 37632, "strung</w>": 46033, "strust</w>": 23920, "strut</w>": 48375, "stry": 17325, "stry</w>": 2245, "sts</w>": 1088, "stu": 858, "stu</w>": 23531, "stuart": 32054, "stuart</w>": 11723, "stub": 27066, "stubborn</w>": 38955, "stuck</w>": 6596, "stud": 22368, "stud</w>": 13319, "studded</w>": 29153, "studen": 44156, "student": 14681, "student</w>": 2556, "students</w>": 1712, "studi": 5691, "studied</w>": 21369, "studies</w>": 6426, "studio": 17798, "studio</w>": 3155, "studios</w>": 6231, "studs</w>": 27571, "study": 21051, "study</w>": 3123, "studyabroad</w>": 45425, "studying</w>": 8826, "stuff": 46072, "stuff</w>": 3487, "stuffed</w>": 11781, "stuffing</w>": 31612, "stuffs</w>": 43455, "stuk</w>": 32424, "stumb": 16784, "stumble</w>": 39045, "stumbled</w>": 21776, "stump</w>": 32064, "stun": 3088, "stun</w>": 37959, "stunned</w>": 34034, "stunner</w>": 29965, "stunning</w>": 3769, "stunningly</w>": 47515, "stuns</w>": 43796, "stunt</w>": 19905, "stunts</w>": 40118, "stupi": 18975, "stupid": 42600, "stupid</w>": 8085, "stupidity</w>": 33766, "stur": 10676, "sturdy</w>": 43780, "stures</w>": 27223, "sturgeon</w>": 31580, "sturi": 21747, "sturridge</w>": 45331, "stutt": 30444, "stuttgart</w>": 32219, "stv": 27060, "stv</w>": 9708, "stweet</w>": 46832, "stweets</w>": 39174, "stx</w>": 42548, "sty": 1421, "sty</w>": 2920, "style": 12356, "style</w>": 1844, "styled</w>": 17974, "styles</w>": 6948, "styli": 38577, "styling</w>": 14597, "stylish</w>": 10378, "stylist</w>": 15928, "styn</w>": 41394, "su": 605, "su</w>": 2937, "sua</w>": 42448, "suarez</w>": 21437, "suave</w>": 47305, "sub": 1783, "sub</w>": 7765, "subaru</w>": 21319, "subjec": 16090, "subject</w>": 10300, "subjects</w>": 22099, "subli": 16350, "sublime</w>": 22367, "submarine</w>": 19968, "submer": 27156, "submerged</w>": 43171, "submission</w>": 16571, "submissions</w>": 21566, "submit</w>": 10423, "submitted</w>": 15189, "submitting</w>": 38788, "subram": 49207, "subs</w>": 16398, "subscri": 5838, "subscribe</w>": 9839, "subscribed</w>": 44867, "subscriber</w>": 36292, "subscribers</w>": 17337, "subscription</w>": 17979, "subscriptions</w>": 47162, "subsequ": 33598, "subsequent</w>": 44323, "subsi": 14856, "subsidi": 45029, "subsidiary</w>": 45506, "subsidies</w>": 37685, "subsidy</w>": 47462, "substan": 17487, "substance</w>": 19309, "substances</w>": 36834, "substantial</w>": 27171, "substantially</w>": 47577, "substitu": 18529, "substitute</w>": 25340, "subtitles</w>": 39479, "subtle</w>": 16536, "subur": 12517, "suburb</w>": 37664, "suburban</w>": 23570, "suburbs</w>": 25317, "subway</w>": 12196, "suc": 1869, "succe": 7981, "succeed</w>": 13556, "succeeded</w>": 41077, "succes": 39019, "success": 3695, "success</w>": 3034, "successes</w>": 29436, "successful</w>": 4670, "successfully</w>": 9934, "succession</w>": 38491, "successive</w>": 41319, "successor</w>": 34774, "succu": 45253, "succul": 25671, "succulent</w>": 35236, "such</w>": 2046, "suction</w>": 42786, "sud": 8067, "sud</w>": 33714, "sudan": 31149, "sudan</w>": 13474, "sudanese</w>": 42837, "sudbury</w>": 32488, "sudden": 10833, "sudden</w>": 15433, "suddenly</w>": 11076, "sue": 14045, "sue</w>": 6641, "sued</w>": 22225, "suede</w>": 21036, "sues</w>": 17105, "suf": 21204, "suf</w>": 22579, "sufc</w>": 37091, "suff": 4866, "suffe": 13510, "suffer</w>": 13557, "suffered</w>": 14766, "suffering</w>": 10140, "suffers</w>": 22389, "sufficient</w>": 28410, "suffol": 13775, "suffolk": 46408, "suffolk</w>": 15685, 
"suffra": 34596, "suffrage</w>": 39567, "sufi</w>": 39756, "sug": 3189, "suga</w>": 28757, "sugar": 12418, "sugar</w>": 5574, "sugge": 6345, "suggest</w>": 13356, "suggested</w>": 18790, "suggesti": 15033, "suggesting</w>": 29792, "suggestion</w>": 23741, "suggestions</w>": 16052, "suggests</w>": 13333, "suho</w>": 32744, "sui</w>": 24972, "suici": 16372, "suicidal</w>": 37165, "suicide": 31310, "suicide</w>": 8247, "suing</w>": 18309, "suisse</w>": 35964, "suit": 11887, "suit</w>": 3940, "suitable</w>": 17476, "suitcase</w>": 27792, "suite</w>": 9346, "suited</w>": 25919, "suites</w>": 21523, "suits</w>": 9949, "suk": 24820, "suk</w>": 6886, "suka</w>": 44017, "suke</w>": 25590, "sukh": 46961, "suki</w>": 32704, "sul": 1767, "sul</w>": 19879, "sula": 34713, "sula</w>": 26143, "sullivan</w>": 14477, "sully</w>": 37752, "sulph": 37234, "sulphur</w>": 47659, "sultan": 35650, "sultan</w>": 17049, "sum": 7054, "sum</w>": 8257, "suma</w>": 47938, "sumat": 32640, "sumatra</w>": 47346, "sume</w>": 45457, "sumi</w>": 41248, "summ": 1309, "summar": 34657, "summari": 31993, "summary</w>": 13435, "summed</w>": 34912, "summer": 5500, "summer</w>": 1673, "summers</w>": 18254, "summerslam</w>": 40264, "summertime</w>": 19025, "summit": 30011, "summit</w>": 3768, "summon": 27622, "summon</w>": 39782, "sumner</w>": 46813, "sumo</w>": 33734, "sump": 34252, "sumptuous</w>": 47354, "sums</w>": 13325, "sun": 968, "sun</w>": 2176, "sunbathing</w>": 46994, "sunburn</w>": 45767, "sund</w>": 40735, "sundae</w>": 38078, "sundance</w>": 24128, "sundar": 44936, "sunday": 6649, "sunday</w>": 1706, "sundayfunday</w>": 21565, "sundaymorning</w>": 24809, "sundaymotivation</w>": 46227, "sundays</w>": 15827, "sundaywith": 26469, "sundaywithmarsha</w>": 26662, "sunder": 15097, "sunderland": 45727, "sunderland</w>": 18851, "sundown</w>": 44438, "sune</w>": 41096, "sunflower</w>": 21559, "sunflowers</w>": 39809, "sung": 16903, "sung</w>": 6047, "sunglasses</w>": 12906, "suni": 17663, "suni</w>": 47010, "sunil</w>": 32861, "sunite</w>": 21382, "sunited</w>": 35276, "sunk</w>": 37534, "sunken</w>": 43473, "sunlight</w>": 17996, "sunni</w>": 44315, "sunny": 15632, "sunny</w>": 5438, "sunrise</w>": 5610, "suns</w>": 18322, "sunscreen</w>": 29355, "sunset": 37880, "sunset</w>": 3424, "sunsets</w>": 17721, "sunshine": 32761, "sunshine</w>": 5385, "suny</w>": 41308, "sup": 19078, "sup</w>": 8249, "supdates</w>": 24177, "super": 1642, "super</w>": 1994, "superb</w>": 8930, "superbike</w>": 45709, "superbowl": 47461, "superbowl</w>": 16467, "supercar</w>": 27021, "supercars</w>": 32185, "supercell</w>": 43227, "supercharged</w>": 47479, "supere": 46831, "superfood</w>": 41715, "supergirl</w>": 25771, "superhero</w>": 14049, "superheroes</w>": 23334, "superint": 17615, "superintendent</w>": 19020, "superior</w>": 13205, "superjunior</w>": 40475, "superleague</w>": 45539, "superman</w>": 11237, "supermarket</w>": 19897, "supermarkets</w>": 45106, "supermodel</w>": 41963, "supermoon</w>": 36571, "supernatural</w>": 15484, "supernova</w>": 39843, "superrugby</w>": 48717, "supersonic</w>": 42019, "supersport</w>": 46319, "superst": 38202, "superstar": 32551, "superstar</w>": 10472, "superstars</w>": 25797, "supervis": 12709, "supervised</w>": 41316, "supervision</w>": 36234, "supervisor</w>": 20366, "supervisors</w>": 37958, "superyacht</w>": 42714, "supp": 1023, "supper</w>": 15727, "supple": 31431, "supplement</w>": 19924, "supplements</w>": 21265, "supplied</w>": 24106, "supplier</w>": 18043, "suppliers</w>": 24196, "supplies</w>": 
9384, "supply": 25074, "supply</w>": 6389, "supplychain</w>": 31224, "supplying</w>": 32739, "suppo": 6941, "suppor": 2104, "support": 12062, "support</w>": 1425, "supported</w>": 8038, "supporter</w>": 12992, "supporters</w>": 7403, "supportindiefilm</w>": 43976, "supporting</w>": 3976, "supportive</w>": 18313, "supportlocal</w>": 43852, "supports</w>": 8336, "supportsmall": 30941, "supportsmallstreamers</w>": 36097, "suppose</w>": 18924, "supposed</w>": 9119, "supposedly</w>": 32302, "suppre": 20542, "suppression</w>": 36508, "supra</w>": 48485, "supre": 5875, "supremac": 28643, "supremacist</w>": 39005, "supremacy</w>": 28913, "supreme": 35222, "supreme</w>": 7468, "supt</w>": 23625, "sur": 1090, "sur</w>": 7123, "sura": 33412, "sura</w>": 49125, "surabaya</w>": 45227, "surance</w>": 22184, "surat</w>": 30201, "sure": 14320, "sure</w>": 1650, "sured</w>": 36869, "surely</w>": 11409, "sures</w>": 12725, "suresh": 32118, "suresh</w>": 31464, "sureshpp": 41924, "sureshpprabhu</w>": 42050, "surf": 10176, "surf</w>": 10322, "surface</w>": 7744, "surfaces</w>": 20746, "surfer</w>": 24925, "surfers</w>": 34842, "surfing</w>": 15762, "surg": 13045, "surge</w>": 17457, "surgeon</w>": 16039, "surgeons</w>": 26000, "surger": 5122, "surgeries</w>": 34940, "surgery</w>": 5344, "surgical</w>": 16386, "suri": 14130, "suri</w>": 33952, "suring</w>": 16817, "suriya</w>": 17832, "surpass</w>": 45494, "surpassed</w>": 25648, "surplus</w>": 29413, "surpri": 3244, "surprise</w>": 5099, "surprised</w>": 8949, "surprises</w>": 16920, "surprising</w>": 14964, "surprisingly</w>": 17367, "surreal</w>": 18408, "surrealism</w>": 41773, "surrender</w>": 20964, "surrendered</w>": 44601, "surrey": 26489, "surrey</w>": 14315, "surro": 47499, "surroun": 8250, "surround": 26543, "surround</w>": 22999, "surrounded</w>": 13589, "surrounding</w>": 12544, "surroundings</w>": 26915, "surrounds</w>": 39012, "suru</w>": 49240, "surve": 8952, "surveill": 15408, "surveillance</w>": 15578, "survey": 45914, "survey</w>": 6809, "surveying</w>": 33085, "surveys</w>": 25096, "survi": 3440, "surviv": 12922, "survival</w>": 10172, "survive</w>": 10431, "survived</w>": 13483, "survives</w>": 30927, "surviving</w>": 18609, "survivor": 31934, "survivor</w>": 10944, "survivors</w>": 13711, "surya</w>": 37767, "sus": 8091, "sus</w>": 3036, "susa</w>": 20546, "susan": 19922, "susan</w>": 10168, "suscep": 44270, "sush": 22298, "sushi</w>": 11729, "sushmaswar": 48200, "susie</w>": 32284, "susp": 7971, "suspec": 10298, "suspect</w>": 9065, "suspected</w>": 15579, "suspects</w>": 18265, "suspen": 10578, "suspend</w>": 41007, "suspended</w>": 13126, "suspends</w>": 39535, "suspense</w>": 21556, "suspension</w>": 15417, "suspici": 25714, "suspicion</w>": 34910, "suspicious</w>": 19862, "sussex": 31244, "sussex</w>": 13266, "sustain": 4644, "sustain</w>": 28156, "sustainability</w>": 9635, "sustainable": 23645, "sustainable</w>": 7078, "sustained</w>": 22699, "sustaining</w>": 44418, "sut": 23984, "sut</w>": 28956, "sutherland</w>": 27592, "sutton": 39359, "sutton</w>": 18564, "suv</w>": 15985, "suz": 9957, "suzanne</w>": 24617, "suzu": 36289, "suzuki</w>": 16892, "suzy</w>": 26552, "sv": 6508, "sv</w>": 17083, "svc</w>": 45065, "sve": 47637, "sven": 37786, "sven</w>": 45183, "sver": 45923, "sville": 44580, "sville</w>": 6741, "svp</w>": 28465, "svt</w>": 42014, "svu</w>": 32123, "sw": 1220, "sw</w>": 4457, "swa": 4707, "swa</w>": 31916, "swach": 20862, "swachhb": 31898, "swachhbharat</w>": 36927, "swag": 8852, "swag</w>": 8177, "swagg</w>": 47702, 
"swagger</w>": 35797, "swain</w>": 43226, "swal": 13433, "swallow</w>": 28979, "swallowed</w>": 46956, "swallows</w>": 45124, "swam</w>": 42539, "swami</w>": 25021, "swamp": 41953, "swamp</w>": 16595, "swamy</w>": 28445, "swan": 8215, "swan</w>": 12530, "swana</w>": 24699, "swans</w>": 19516, "swansea</w>": 16567, "swanson</w>": 34797, "swap</w>": 15234, "swapped</w>": 39077, "swapping</w>": 44702, "swaps</w>": 49242, "swar": 11680, "swarm</w>": 31577, "swarovski</w>": 28515, "swat": 32547, "swat</w>": 26482, "swatch</w>": 48053, "sway": 26443, "sway</w>": 26617, "swc</w>": 42231, "swe": 2350, "swe</w>": 38070, "swear</w>": 7406, "swearing</w>": 32627, "sweat": 10282, "sweat</w>": 12663, "sweater</w>": 11455, "sweaters</w>": 31303, "sweating</w>": 33215, "sweats</w>": 39321, "sweatshirt</w>": 22442, "sweaty</w>": 28419, "sweden</w>": 8760, "swedish</w>": 11585, "swee": 1812, "sweek</w>": 30017, "sweeney</w>": 27286, "sweep": 23220, "sweep</w>": 13669, "sweeping</w>": 25719, "sweeps</w>": 26887, "sweepstakes</w>": 25992, "sweet": 10957, "sweet</w>": 2418, "sweetened</w>": 45577, "sweeter</w>": 32873, "sweetest</w>": 15180, "sweethe": 16316, "sweetheart</w>": 18079, "sweetie</w>": 24450, "sweetness</w>": 29713, "sweets</w>": 18045, "swel": 48470, "swell": 35538, "swell</w>": 21490, "swelling</w>": 46578, "swept</w>": 23311, "swer": 30514, "swfc</w>": 30227, "swfl</w>": 46607, "swi": 3881, "swi</w>": 45223, "swick</w>": 17159, "swif": 28548, "swift": 34843, "swift</w>": 8229, "swild": 33909, "swild</w>": 38696, "swildlife</w>": 46818, "swim": 4928, "swim</w>": 7681, "swimmer</w>": 25475, "swimmers</w>": 27776, "swimming</w>": 7411, "swims</w>": 46798, "swimsuit</w>": 25504, "swimwear</w>": 31889, "swin": 14554, "swin</w>": 40798, "swindon</w>": 29540, "swine</w>": 31166, "swing": 25292, "swing</w>": 7429, "swinging</w>": 26760, "swings</w>": 29141, "swipe</w>": 31828, "swire</w>": 42753, "swirl</w>": 35795, "swis": 23611, "swish</w>": 38571, "swiss": 37917, "swiss</w>": 9287, "swit": 3726, "switch": 22480, "switch</w>": 5893, "switched</w>": 22869, "switches</w>": 33569, "switching</w>": 21155, "swith": 17299, "switzer": 9835, "switzerland</w>": 9912, "swivel</w>": 48256, "swo": 38673, "swol": 29575, "swollen</w>": 36129, "swoo": 29744, "swood</w>": 24158, "swoon</w>": 37028, "swoop</w>": 45661, "sword": 33294, "sword</w>": 11356, "swords</w>": 27181, "swork</w>": 42722, "sworld</w>": 33305, "sworn</w>": 21130, "sworth</w>": 13322, "swt</w>": 38878, "swx</w>": 20597, "sx": 9402, "sx</w>": 17806, "sxsw</w>": 13369, "sy": 974, "sy</w>": 2126, "sya</w>": 35017, "sycam": 34911, "sycamore</w>": 43086, "syd": 4525, "syd</w>": 22504, "sydney": 15878, "sydney</w>": 5278, "syed</w>": 27624, "syfy</w>": 32047, "sykes</w>": 27287, "syl": 6452, "sylla": 41708, "sylvania</w>": 12011, "sylve": 28369, "sylvester</w>": 37214, "sylvia</w>": 25670, "sym": 3645, "sym</w>": 40327, "symb": 22987, "symbol</w>": 13085, "symboli": 22019, "symbolic</w>": 33177, "symbolism</w>": 44679, "symbols</w>": 25476, "symmetry</w>": 31427, "symp": 11468, "sympathi": 47493, "sympathy</w>": 32477, "symph": 9544, "symphonic</w>": 42639, "symphony</w>": 11180, "sympo": 9730, "symposium</w>": 9971, "symptom</w>": 47799, "symptoms</w>": 12956, "syn": 3758, "syn</w>": 36090, "synago": 30945, "synagogue</w>": 33518, "sync</w>": 20081, "synchron": 23943, "syndic": 21098, "syndicate</w>": 28779, "syndrome</w>": 10927, "syner": 22283, "synergy</w>": 32012, "syno": 31533, "synod</w>": 47712, "synopsis</w>": 47018, "synth": 33841, 
"synth</w>": 24462, "synthe": 22604, "synthesi": 33565, "synthesis</w>": 21602, "synthesizer</w>": 44077, "synthetic</w>": 19917, "syou": 26742, "syour": 21718, "syrac": 17279, "syracuse</w>": 19640, "syrah</w>": 45364, "syri": 18917, "syria</w>": 5563, "syrian": 47562, "syrian</w>": 10041, "syrians</w>": 41392, "syrup</w>": 16611, "sys</w>": 26726, "syste": 1933, "system": 47813, "system</w>": 2422, "systematic</w>": 28586, "systemic</w>": 33807, "systems</w>": 4828, "sz": 13438, "sz</w>": 15879, "sze": 44507, "szn</w>": 48092, "são</w>": 45911, "sé": 37879, "t": 83, "t</w>": 339, "ta": 648, "ta</w>": 1397, "taa</w>": 43874, "tab": 2648, "tab</w>": 14724, "tabby</w>": 36145, "tabern": 48991, "tability</w>": 15770, "table": 12108, "table</w>": 2175, "tableau</w>": 39723, "tables</w>": 7822, "tablet</w>": 12494, "tabletop": 46843, "tabletop</w>": 25773, "tablets</w>": 20436, "tably</w>": 24440, "taboo</w>": 38400, "tabs</w>": 29163, "tac": 3145, "tac</w>": 22653, "tache</w>": 39239, "tack": 6339, "tack</w>": 34446, "tackle</w>": 10294, "tackled</w>": 47218, "tackles</w>": 18021, "tackling</w>": 19628, "taco": 31924, "taco</w>": 12436, "tacoma</w>": 25397, "tacos</w>": 14090, "tactic</w>": 40377, "tactical</w>": 17137, "tactics</w>": 16410, "tacular</w>": 48985, "tad": 15890, "tad</w>": 19860, "tado</w>": 40846, "tae": 15257, "tae</w>": 15580, "taehyung</w>": 24642, "taek": 30753, "taekwondo</w>": 39963, "taemin</w>": 30600, "taeyang</w>": 45802, "taeyeon</w>": 27389, "taf": 29660, "taft</w>": 42141, "tag": 3456, "tag</w>": 3640, "tage</w>": 2669, "tages</w>": 39902, "tagged</w>": 12969, "tagging</w>": 25138, "tagne</w>": 47467, "tags</w>": 11606, "tah": 14822, "tah</w>": 7090, "tahit": 45385, "tahoe</w>": 26140, "tai": 6511, "tai</w>": 13040, "taiji</w>": 30185, "tail": 7156, "tail</w>": 4132, "tailed</w>": 20626, "tailgate</w>": 23168, "tailgating</w>": 42625, "tailo": 27230, "tailor</w>": 29870, "tailored</w>": 28275, "tailoring</w>": 46357, "tails</w>": 16066, "tain": 2841, "tain</w>": 1908, "taine": 21214, "taine</w>": 32299, "tained</w>": 10212, "taining</w>": 7565, "tainment</w>": 30063, "tains</w>": 3952, "tainted</w>": 47211, "taipei</w>": 24356, "tair": 29143, "tairp</w>": 43707, "tait</w>": 45325, "taiwan": 36319, "taiwan</w>": 12626, "taiwanese</w>": 41416, "taj": 28937, "taj</w>": 24805, "taji": 46358, "tak": 15070, "tak</w>": 14458, "taka": 24070, "taka</w>": 40968, "take": 5052, "take</w>": 1172, "takeaway</w>": 25737, "takeaways</w>": 32080, "takeme": 41748, "taken</w>": 2807, "takeoff</w>": 32789, "takeover</w>": 11863, "taker</w>": 17939, "takers</w>": 30775, "takes</w>": 2633, "takin</w>": 30890, "taking</w>": 2019, "taku</w>": 48168, "tal": 976, "tal</w>": 2066, "tala</w>": 29845, "talaga</w>": 35349, "talbot</w>": 30585, "tale": 33971, "tale</w>": 7798, "talent": 30435, "talent</w>": 5114, "talented</w>": 5331, "talents</w>": 16136, "tales</w>": 9469, "tali": 12122, "tali</w>": 45406, "taliban</w>": 20788, "talis": 36480, "tality</w>": 15631, "talk": 12462, "talk</w>": 1841, "talked</w>": 10153, "talkin</w>": 26040, "talking": 31463, "talking</w>": 2578, "talks</w>": 3237, "tall": 11664, "tall</w>": 7771, "talla": 21528, "tallade": 44220, "tallahassee</w>": 37832, "taller</w>": 23470, "tallest</w>": 19774, "tallinn</w>": 45079, "tally</w>": 16323, "talon</w>": 47897, "tam": 2661, "tam</w>": 12246, "tama</w>": 45424, "tamanna": 48055, "tamar": 22901, "tamara</w>": 35697, "tame": 38557, "tame</w>": 32778, "tamed</w>": 40575, "tami</w>": 39429, "tamil": 23046, "tamil</w>": 
14033, "tamilnadu</w>": 32371, "tamine</w>": 42566, "tammy</w>": 28396, "tampa</w>": 10906, "tampab": 37852, "tamu</w>": 34105, "tan": 2123, "tan</w>": 5039, "tana</w>": 21396, "tand": 20244, "tandem</w>": 33756, "tane": 13344, "tane</w>": 24923, "taneous</w>": 22275, "taneously</w>": 24422, "tang": 10425, "tang</w>": 20794, "tanger": 31844, "tangerine</w>": 42045, "tangible</w>": 44823, "tangle</w>": 36568, "tangled</w>": 33587, "tango</w>": 24089, "tani": 31374, "tani</w>": 32985, "tania</w>": 45369, "tank": 29858, "tank</w>": 6172, "tanker</w>": 25020, "tanks</w>": 14223, "tann": 19174, "tanner</w>": 22001, "tanning</w>": 27985, "tans</w>": 27332, "tant": 41383, "tant</w>": 41695, "tante</w>": 48262, "tanto</w>": 45685, "tany": 34410, "tanya</w>": 26800, "tanz": 47399, "tanzania</w>": 15711, "tao": 29084, "tao</w>": 18923, "tap": 17923, "tap</w>": 7888, "tapas</w>": 27361, "tape": 18332, "tape</w>": 5749, "taped</w>": 33219, "tapes</w>": 17903, "tapestry</w>": 33525, "taping</w>": 24355, "tapp": 27644, "tapp</w>": 27764, "tapped</w>": 26649, "tapping</w>": 27882, "tapro": 34415, "taproom</w>": 40266, "taps</w>": 23267, "tar": 2002, "tar</w>": 6977, "tara</w>": 15264, "tarak</w>": 37813, "taran": 32370, "tarantino</w>": 41180, "tarde</w>": 48670, "tardis</w>": 35410, "tares</w>": 34587, "targe": 9620, "target": 38556, "target</w>": 5400, "targeted</w>": 14968, "targeting</w>": 15818, "targets</w>": 12468, "tari": 4238, "tari</w>": 38012, "tarian</w>": 11762, "tarians</w>": 42789, "taries</w>": 47291, "tariff</w>": 40220, "tariffs</w>": 28335, "tariq</w>": 42526, "tarmac</w>": 44294, "taro</w>": 26264, "tarot</w>": 23702, "tart": 16707, "tart</w>": 14120, "tartan</w>": 35064, "tarts</w>": 29799, "tary": 31729, "tary</w>": 5065, "tarzan</w>": 45463, "tas": 6538, "tas</w>": 10163, "tash": 35272, "tasha</w>": 44967, "task": 39189, "task</w>": 10549, "tasks</w>": 19453, "tasmania</w>": 22429, "tasmanian</w>": 45102, "tassel</w>": 49276, "tast": 10839, "taste": 14314, "taste</w>": 5219, "tasted</w>": 22827, "tasteof": 38097, "taster</w>": 29743, "tastes</w>": 13736, "tastic</w>": 21337, "tasting</w>": 7656, "tastings</w>": 49273, "tasty": 43390, "tasty</w>": 8568, "tat": 2652, "tat</w>": 21592, "tata</w>": 19300, "tate": 44476, "tate</w>": 13295, "tath": 27566, "tati": 31433, "tatiana</w>": 48837, "tation</w>": 5280, "tations</w>": 32324, "tator</w>": 18791, "tators</w>": 37206, "tats</w>": 44557, "tatt": 9232, "tatted</w>": 41605, "tattoo": 15980, "tattoo</w>": 6325, "tattooed</w>": 28541, "tattoos</w>": 14900, "tatum</w>": 26103, "tau": 6620, "tau</w>": 20510, "taught</w>": 9306, "taun": 23910, "taunton</w>": 40681, "taurus</w>": 32881, "taver": 37776, "tavern</w>": 18644, "taw": 33868, "taw</w>": 40289, "tawa</w>": 29035, "tawards</w>": 14351, "tax": 4581, "tax</w>": 3879, "taxation</w>": 36847, "taxes</w>": 11462, "taxi": 25160, "taxi</w>": 11380, "taxider": 47420, "taxis</w>": 34009, "taxpay": 17986, "taxpayer</w>": 30978, "taxpayers</w>": 25503, "tay": 6542, "tay</w>": 15073, "taya</w>": 38484, "tayl": 3913, "taylor": 9044, "taylor</w>": 3961, "taylorswift</w>": 18936, "tayo</w>": 33941, "taz": 41475, "taz</w>": 31870, "tb": 1990, "tb</w>": 7490, "tba</w>": 34363, "tball": 8390, "tball</w>": 1467, "tbc</w>": 31807, "tbd</w>": 45548, "tbh</w>": 13238, "tbi</w>": 45868, "tbl</w>": 42962, "tbli": 43664, "tblightning</w>": 44178, "tbo</w>": 34255, "tbr</w>": 46643, "tbs</w>": 37368, "tbt</w>": 2950, "tc": 6820, "tc</w>": 5454, "tca</w>": 35116, "tch": 10744, "tch</w>": 4048, "tches</w>": 
42001, "tcm": 21501, "tcm</w>": 26588, "tcmparty</w>": 24338, "tcot</w>": 8995, "tcs</w>": 39107, "tcu</w>": 26791, "td": 20578, "td</w>": 3192, "tdf</w>": 21844, "tdi</w>": 45621, "tdp</w>": 47009, "tds</w>": 20238, "tdsb</w>": 29836, "te": 600, "te</w>": 756, "tea": 41053, "tea</w>": 3274, "teach": 2043, "teach</w>": 6865, "teacher": 18051, "teacher</w>": 4008, "teachers</w>": 5069, "teaches</w>": 17110, "teaching</w>": 5141, "teachings</w>": 32119, "teal</w>": 22821, "team": 2085, "team</w>": 1027, "teamcanada</w>": 46636, "teamed</w>": 20590, "teamgb</w>": 40971, "teaming</w>": 24392, "teammate</w>": 17900, "teammates</w>": 13921, "teams</w>": 3891, "teamsisd</w>": 34703, "teamusa</w>": 28625, "teamwork</w>": 14657, "teaparty</w>": 33065, "teapo": 35745, "teapot</w>": 40749, "tear": 15802, "tear</w>": 11862, "tearful</w>": 46873, "tearing</w>": 24785, "tears</w>": 7688, "teas": 23003, "teas</w>": 29314, "tease</w>": 25163, "teased</w>": 49122, "teaser</w>": 8982, "teasers</w>": 48990, "teases</w>": 28509, "teasing</w>": 36507, "teat": 26376, "teatime</w>": 48948, "teatro</w>": 35756, "teau</w>": 24931, "tebow</w>": 37797, "tec": 17381, "tec</w>": 11612, "tech": 1782, "tech</w>": 2061, "techcrunch</w>": 42110, "techn": 6252, "technews</w>": 31787, "technic": 16639, "technic</w>": 37666, "technical": 49231, "technical</w>": 7582, "technically</w>": 23180, "technician</w>": 22540, "technicians</w>": 35513, "techno": 2599, "techno</w>": 17564, "technological</w>": 23068, "technologies</w>": 10040, "technology</w>": 3089, "techs</w>": 41353, "ted": 4841, "ted</w>": 775, "tedcruz</w>": 27517, "teddy": 25758, "teddy</w>": 11798, "tedly</w>": 8539, "tedu</w>": 42517, "tedx": 17950, "tedx</w>": 41504, "tee": 12676, "tee</w>": 3385, "teed</w>": 13692, "teen": 5398, "teen</w>": 4697, "teenage</w>": 14069, "teenager</w>": 19338, "teenagers</w>": 25989, "teenchoice</w>": 28203, "teens</w>": 12375, "teenth</w>": 20249, "teenwolf</w>": 40067, "teeny</w>": 41622, "teer</w>": 48648, "tees</w>": 9641, "teessi": 43295, "teeth</w>": 8225, "tega</w>": 29508, "tegr": 39801, "teh": 18720, "teh</w>": 29601, "tehran</w>": 26399, "tein</w>": 33223, "tej</w>": 46724, "tek": 17489, "tek</w>": 18294, "tekken</w>": 29843, "tel": 4978, "tel</w>": 2226, "telang": 23469, "telangana</w>": 26386, "tele": 3103, "tele</w>": 32851, "telecom</w>": 21057, "telecommunications</w>": 39900, "telegram</w>": 26780, "telegraph</w>": 14713, "telephone</w>": 17243, "telescope</w>": 19037, "telethon</w>": 49266, "televised</w>": 39470, "television</w>": 8608, "telford</w>": 38323, "tell": 16069, "tell</w>": 2330, "teller</w>": 20415, "tellers</w>": 42707, "telling</w>": 5507, "tells</w>": 5217, "tellu": 42511, "telly</w>": 31475, "tels</w>": 43607, "telugu</w>": 22927, "tely</w>": 5630, "tem": 2404, "tem</w>": 17536, "tema</w>": 45881, "teme": 43378, "temp": 2684, "temp</w>": 11097, "tempe</w>": 36723, "temper": 5981, "temper</w>": 35521, "temperature</w>": 9543, "temperatures</w>": 11575, "tempered</w>": 40521, "tempest</w>": 36053, "templ": 16679, "template</w>": 18591, "templates</w>": 30498, "temple": 21841, "temple</w>": 5620, "temples</w>": 24024, "tempo</w>": 19625, "tempor": 4858, "temporal</w>": 43656, "temporarily</w>": 23189, "temporary</w>": 6513, "temps</w>": 11668, "tempt</w>": 28460, "temptation</w>": 30118, "tempted</w>": 26226, "tempting</w>": 34876, "ten": 1149, "ten</w>": 2581, "tenant</w>": 16954, "tenants</w>": 26023, "tenay</w>": 45384, "tenberg</w>": 31329, "tend": 17630, "tend</w>": 21252, "tendency</w>": 
47277, "tender": 23020, "tender</w>": 9838, "tenderloin</w>": 42750, "tenders</w>": 44741, "tending</w>": 35084, "tendon</w>": 48459, "tends</w>": 39962, "tene": 24868, "tened</w>": 13682, "tener</w>": 29054, "teneri": 28000, "tenerife</w>": 29401, "teners</w>": 41307, "teness</w>": 18018, "teng": 34016, "teng</w>": 28474, "tennant</w>": 29310, "tennes": 9514, "tennessee</w>": 10053, "tennis": 31504, "tennis</w>": 5298, "tenor</w>": 30521, "tens</w>": 14062, "tense</w>": 23518, "tension</w>": 15221, "tensions</w>": 24224, "tenstein</w>": 49139, "tent": 18505, "tent</w>": 10782, "tentative</w>": 48238, "tenth</w>": 27483, "tention</w>": 12191, "tents</w>": 30730, "tenure</w>": 30739, "teo</w>": 18665, "tep</w>": 31806, "tequ": 17502, "tequila</w>": 18510, "ter": 704, "ter</w>": 652, "tera</w>": 15155, "teras</w>": 44830, "tere": 11329, "tered": 49272, "tered</w>": 4389, "terence</w>": 33806, "teresa</w>": 19081, "teri</w>": 30917, "teria</w>": 22685, "terie</w>": 42276, "tering</w>": 7929, "term": 40991, "term</w>": 4780, "termin": 4766, "terminal</w>": 11816, "terminals</w>": 44091, "terminator</w>": 29609, "terminology</w>": 48896, "terms</w>": 8663, "tern": 41572, "tern</w>": 12959, "terns</w>": 25251, "tero": 20727, "tero</w>": 24697, "terps</w>": 41471, "terr": 3921, "terra": 22366, "terra</w>": 18816, "terrac": 28549, "terrace</w>": 13820, "terraces</w>": 47508, "terracotta</w>": 45123, "terrain</w>": 20184, "terran": 43726, "terre": 33888, "terre</w>": 27537, "terrell</w>": 39494, "terrence</w>": 38746, "terrestrial</w>": 46299, "terri": 4504, "terri</w>": 36722, "terrible</w>": 9741, "terribly</w>": 34558, "terrier</w>": 14455, "terriers</w>": 47047, "terrific</w>": 13837, "terrified</w>": 28204, "terrifying</w>": 18526, "territ": 10720, "territorial</w>": 39163, "territories</w>": 32846, "territory</w>": 13936, "terror": 9596, "terror</w>": 9327, "terrori": 6836, "terrorism</w>": 10583, "terrorist</w>": 10575, "terrorists</w>": 12835, "terry": 19378, "terry</w>": 8561, "ters": 24102, "ters</w>": 1737, "terti": 48386, "tery</w>": 4184, "tes": 8019, "tes</w>": 3609, "tesco</w>": 15434, "tese</w>": 33320, "tesla</w>": 12254, "tess": 21807, "tess</w>": 20840, "tessa</w>": 32063, "test": 7738, "test</w>": 1628, "testam": 23477, "testament</w>": 24609, "tested</w>": 10576, "tester</w>": 32707, "testi": 18373, "testic": 42364, "testify</w>": 33088, "testifying</w>": 46347, "testim": 12553, "testimonial</w>": 28834, "testimony</w>": 18672, "testing</w>": 4967, "testo": 42428, "testosterone</w>": 45168, "tests</w>": 8715, "tet": 40468, "tet</w>": 13275, "tetra": 40902, "tetris</w>": 45934, "teu": 47152, "teuk</w>": 39979, "teur</w>": 27120, "tex": 2056, "tex</w>": 11728, "texan": 35287, "texan</w>": 38386, "texans</w>": 17580, "texanscheer</w>": 43717, "texas": 15713, "texas</w>": 3403, "texaste": 46469, "text": 18169, "text</w>": 4160, "textbook</w>": 25952, "textbooks</w>": 44041, "texted</w>": 29004, "textile</w>": 19789, "textiles</w>": 24326, "texting</w>": 18600, "texts</w>": 12767, "texture</w>": 16505, "textured</w>": 32168, "textures</w>": 28063, "tey</w>": 32395, "tez</w>": 22664, "tf": 18828, "tf</w>": 5001, "tfc</w>": 30186, "tfl</w>": 29918, "tford</w>": 22493, "tful</w>": 17108, "tfw</w>": 16741, "tg": 7665, "tg</w>": 11981, "tgif</w>": 14483, "th": 513, "th</w>": 640, "tha": 18470, "tha</w>": 4715, "thab": 38219, "thad": 48339, "thai": 28054, "thai</w>": 8825, "thail": 7258, "thailand</w>": 7469, "thak": 22801, "thakur</w>": 38427, "thal": 7967, "thal</w>": 12323, 
"thala</w>": 17784, "thalai": 25206, "thalaivar</w>": 44918, "thalap": 39789, "thalapathy": 45405, "thalapathy</w>": 23324, "thall</w>": 36007, "tham": 11761, "tham</w>": 8896, "thames": 43472, "thames</w>": 15321, "than": 792, "than</w>": 1126, "thand": 44465, "thane</w>": 21463, "thang</w>": 24870, "thani</w>": 31322, "thank": 2790, "thank</w>": 1144, "thanked</w>": 32079, "thankful": 38839, "thankful</w>": 6217, "thankfully</w>": 22089, "thanking</w>": 21989, "thanks": 5672, "thanks</w>": 1085, "thanksgiving": 45732, "thanksgiving</w>": 6167, "thanku</w>": 45710, "thankyou": 18050, "thankyou</w>": 9911, "thanniversary</w>": 35564, "thanos</w>": 36709, "thanx</w>": 25095, "thar": 14396, "thar</w>": 38843, "thard</w>": 43474, "that": 6303, "that</w>": 682, "thatcher</w>": 32496, "thats": 44636, "thats</w>": 9254, "thaw": 26081, "thaw</w>": 47229, "thbewithyou</w>": 41067, "thc</w>": 20091, "thcentury</w>": 49111, "thd</w>": 28219, "thday</w>": 37801, "the": 599, "the</w>": 518, "thea": 15935, "thea</w>": 25429, "thead</w>": 25259, "theal": 45728, "thealth</w>": 31398, "thear": 43283, "theart": 44678, "theast</w>": 8378, "theastern</w>": 17877, "theat": 2263, "theater": 39438, "theater</w>": 6128, "theaters</w>": 14689, "theatre": 19857, "theatre</w>": 3292, "theatres</w>": 21680, "theatrical</w>": 26833, "theband</w>": 27695, "thebeatles</w>": 35645, "thebest": 40883, "thebest</w>": 25856, "thebig": 24732, "theblack": 47718, "thec": 48659, "thed</w>": 31405, "thedaily": 33550, "theday</w>": 4408, "thedream</w>": 39417, "thee": 44475, "thee</w>": 15108, "theeconomist</w>": 44518, "theellenshow</w>": 35342, "thefilm</w>": 31665, "theflash</w>": 25434, "theforce": 40002, "theforceawakens</w>": 48033, "theft</w>": 13286, "thefuture</w>": 34287, "thegame</w>": 24428, "thegood": 28594, "thegreat": 28721, "thei": 44522, "their</w>": 911, "theirs</w>": 29297, "thel": 5403, "thelast": 23495, "thelastjedi</w>": 47992, "theless</w>": 27712, "theli": 15277, "thelittle": 46872, "thelo": 47036, "thelove": 40668, "thelove</w>": 43200, "them": 5435, "them</w>": 1180, "themasters</w>": 48378, "theme": 38524, "theme</w>": 5849, "themed</w>": 10126, "themes</w>": 17849, "themet</w>": 48183, "themovie</w>": 27062, "themselves</w>": 6503, "then": 5929, "then</w>": 1594, "thenburg</w>": 45209, "thene": 17012, "thenew": 24212, "thenext": 47881, "thenight</w>": 43336, "theno": 37172, "thenorth</w>": 34338, "theo": 17043, "theo</w>": 18084, "theod": 26653, "theodore</w>": 30743, "theological</w>": 41162, "theology</w>": 24095, "theon</w>": 34653, "theone</w>": 46231, "theopen</w>": 41438, "theore": 22690, "theoretical</w>": 35585, "theori": 34804, "theories</w>": 23937, "theory</w>": 7143, "thepeople</w>": 33597, "thepersonal": 29981, "thepersonalnetwork</w>": 30016, "thephoto": 18303, "thephotohour</w>": 18607, "ther": 1160, "ther</w>": 743, "therap": 4499, "therapeu": 19332, "therapeutic</w>": 23240, "therapeutics</w>": 49101, "therapies</w>": 30179, "therapist</w>": 20608, "therapists</w>": 34763, "therapper</w>": 49340, "therapy</w>": 5257, "there": 5283, "there</w>": 997, "thereal": 8074, "thereal</w>": 41140, "thereby</w>": 43308, "thered</w>": 10208, "therefore</w>": 16865, "theres</w>": 18494, "theresa</w>": 14126, "therese</w>": 47996, "theresistance</w>": 22845, "theri": 28967, "theri</w>": 45297, "therine": 26807, "therine</w>": 9239, "thering</w>": 7891, "therland</w>": 25351, "thermal</w>": 13689, "thermo": 22303, "thermom": 31138, "thermometer</w>": 38172, "thermost": 42391, "thern": 10919, 
"thern</w>": 3137, "thero": 13165, "theroad</w>": 29807, "therock</w>": 30036, "theroy": 38146, "thers</w>": 1959, "thes": 40556, "thes</w>": 6460, "thescript</w>": 47061, "these": 40366, "these</w>": 1071, "theses</w>": 39388, "thesimpsons</w>": 45513, "thesims</w>": 34192, "thesis</w>": 10673, "thessal": 41491, "thessaloni": 41753, "thest</w>": 35343, "thesun</w>": 45617, "theta</w>": 27694, "thetic</w>": 7954, "thetimes</w>": 36039, "thevamp": 33701, "thevoice": 47206, "thevoice</w>": 30258, "thewalkingdead</w>": 18087, "thewanted</w>": 43008, "theworld": 44988, "theworld</w>": 17475, "thex": 35990, "they": 15174, "they</w>": 889, "theyre</w>": 28266, "thfc</w>": 17729, "thi": 2362, "thi</w>": 9111, "thia</w>": 17943, "thiago</w>": 44537, "thian</w>": 23214, "thians</w>": 28187, "thibau": 48351, "thic": 26107, "thic</w>": 11794, "thick": 18417, "thick</w>": 11006, "thicker</w>": 43302, "thickness</w>": 40754, "thief</w>": 18508, "thier</w>": 25595, "thierry</w>": 32929, "thieves</w>": 17899, "thigh": 47124, "thigh</w>": 22877, "thighs</w>": 30847, "thik</w>": 20512, "thika</w>": 44619, "thill</w>": 31266, "thim": 42331, "thin": 2178, "thin</w>": 7847, "thine</w>": 47192, "thing": 7499, "thing</w>": 946, "things": 30670, "things</w>": 1739, "thingsto": 43924, "thingy</w>": 36888, "think": 9820, "think</w>": 1331, "thinkbig": 26015, "thinkbigsundaywithmarsha</w>": 26666, "thinker</w>": 34577, "thinkers</w>": 32779, "thinkin</w>": 34443, "thinking</w>": 3291, "thinks</w>": 6109, "thinner</w>": 47247, "thir": 6030, "third": 32102, "third</w>": 3981, "thirds</w>": 42582, "thirst</w>": 23563, "thirsty": 39731, "thirsty</w>": 17521, "thirteen</w>": 34209, "thirty</w>": 20813, "thiru": 43292, "this": 4340, "this</w>": 589, "thisday</w>": 6532, "thisdayin": 33641, "thisdayinhistory</w>": 46913, "thisi": 7299, "thisis": 14887, "thismorning</w>": 36245, "thistle</w>": 29039, "thistory</w>": 28904, "thium</w>": 21804, "thletics</w>": 17765, "thm</w>": 10407, "thman</w>": 30079, "thms</w>": 19874, "thn": 44155, "thn</w>": 45587, "thnx</w>": 25480, "tho": 1325, "tho</w>": 5025, "thof": 18943, "thofjuly</w>": 21613, "thol": 29319, "thole</w>": 31029, "tholes</w>": 42465, "thology</w>": 9881, "thom": 2585, "thom</w>": 24094, "thomas": 12574, "thomas</w>": 3888, "thome</w>": 21289, "thomp": 37274, "thompson": 42181, "thompson</w>": 8535, "thomson</w>": 24151, "thon": 38776, "thon</w>": 8924, "thong</w>": 37058, "thood</w>": 15623, "thor": 4130, "thor</w>": 13691, "thora</w>": 46866, "thorn": 12957, "thorn</w>": 18466, "thorne</w>": 18025, "thorns</w>": 33650, "thornton</w>": 23592, "thorough": 15294, "thorough</w>": 34788, "thoroughbred</w>": 43248, "thoroughly</w>": 19750, "thorpe</w>": 18099, "thos</w>": 41965, "those</w>": 1753, "thot</w>": 33736, "thou": 1513, "thou</w>": 17781, "though</w>": 2846, "thought": 23948, "thought</w>": 2449, "thoughtful</w>": 19592, "thoughts</w>": 3618, "thour</w>": 27125, "thousand</w>": 9344, "thousands</w>": 7089, "thouse": 40318, "thouse</w>": 7819, "thoven</w>": 23078, "thr": 1111, "thr</w>": 19138, "thra": 17761, "thra</w>": 32797, "thrash</w>": 38262, "thre": 1607, "thread": 31108, "thread</w>": 8815, "threads</w>": 24957, "threat": 7527, "threat</w>": 7212, "threaten</w>": 26097, "threatened</w>": 16391, "threatening</w>": 16400, "threatens</w>": 20555, "threats</w>": 12766, "three": 21615, "three</w>": 2097, "thren</w>": 41776, "thresh": 29779, "threshold</w>": 33791, "threw</w>": 12746, "thri": 8713, "thrift</w>": 27779, "thrill</w>": 21023, "thrilled</w>": 
7879, "thriller</w>": 9653, "thrilling</w>": 20101, "thrills</w>": 39829, "thrive</w>": 17669, "thriving</w>": 22677, "thro": 2101, "thro</w>": 28624, "throat</w>": 16371, "thrombo": 47585, "throne</w>": 15999, "thrones</w>": 8072, "throp</w>": 34939, "throttle</w>": 37139, "through": 6091, "through</w>": 1417, "throughout</w>": 6721, "throughs</w>": 48278, "throw": 3315, "throw</w>": 6293, "throwback": 6001, "throwback</w>": 5058, "throwbackthursday</w>": 6326, "thrower</w>": 40199, "throwing</w>": 9734, "thrown</w>": 15079, "throws</w>": 14723, "thru": 23856, "thru</w>": 6162, "thrush</w>": 46133, "thrust</w>": 40202, "ths</w>": 2079, "tht</w>": 23554, "thu": 3837, "thu</w>": 14153, "thub</w>": 25660, "thug": 37212, "thug</w>": 18137, "thugs</w>": 27686, "thul": 28368, "thulhu</w>": 37560, "thum": 14679, "thumb": 19514, "thumb</w>": 18674, "thumbnail</w>": 32365, "thumbs</w>": 17599, "thun": 32267, "thunder": 6161, "thunder</w>": 8951, "thunderbird</w>": 45131, "thunderbirds</w>": 44286, "thunderbolt</w>": 43596, "thunderstorm</w>": 12005, "thunderstorms</w>": 19525, "thunt</w>": 46763, "thur": 1837, "thur</w>": 21704, "thurman</w>": 41291, "thurs</w>": 9908, "thursday": 11218, "thursday</w>": 2221, "thursdaymotivation</w>": 39375, "thursdays</w>": 21444, "thursdaythoughts</w>": 14866, "thurst</w>": 33970, "thus</w>": 12457, "thusi": 9488, "thwaite</w>": 48469, "thweeksary</w>": 30871, "thx</w>": 5913, "thy": 7804, "thy</w>": 3362, "thyme</w>": 29805, "thyro": 25174, "thyroid</w>": 32558, "ti": 555, "ti</w>": 2605, "tia</w>": 6709, "tial</w>": 2826, "tially</w>": 14503, "tian": 23011, "tian</w>": 8125, "tians</w>": 35182, "tiara</w>": 38322, "tib": 47868, "tibet": 19927, "tibet</w>": 22234, "tibetan</w>": 24057, "tible</w>": 11453, "tic": 890, "tic</w>": 1550, "tica</w>": 9669, "tical": 34191, "tical</w>": 4342, "tically</w>": 13375, "ticals</w>": 30861, "tice</w>": 3122, "tich</w>": 48769, "tician</w>": 43358, "ticism</w>": 26491, "tick": 24640, "tick</w>": 15617, "ticket": 25740, "ticket</w>": 4500, "ticketing</w>": 44432, "tickets</w>": 2015, "ticking</w>": 35842, "tickle</w>": 42999, "ticks</w>": 40269, "tico</w>": 17670, "ticon</w>": 45996, "tics</w>": 2419, "ticul": 15538, "ticus</w>": 44277, "tid": 26002, "tid</w>": 23727, "tidal</w>": 21949, "tide": 15698, "tide</w>": 9105, "tides</w>": 25524, "tidy</w>": 23858, "tie": 14072, "tie</w>": 3422, "tied</w>": 9889, "tiem": 34762, "tien</w>": 47538, "tiene</w>": 43438, "tier": 14390, "tier</w>": 6598, "tierney</w>": 45693, "tiers</w>": 24604, "ties": 25556, "ties</w>": 2499, "tiest</w>": 18300, "tiesto</w>": 46367, "tif</w>": 23216, "tiff": 11112, "tiff</w>": 20699, "tiffany": 30467, "tiffany</w>": 14446, "tification</w>": 43923, "tified</w>": 40854, "tiful</w>": 29123, "tify</w>": 6677, "tig</w>": 31999, "tiger": 11954, "tiger</w>": 6531, "tigers</w>": 6934, "tigh": 31365, "tight": 25763, "tight</w>": 9123, "tighten</w>": 46653, "tighter</w>": 48193, "tightly</w>": 37568, "tights</w>": 29581, "tijuana</w>": 45273, "tik": 24986, "tik</w>": 32403, "tiki</w>": 30107, "til": 6124, "til</w>": 1763, "tile": 26217, "tile</w>": 8227, "tiles</w>": 10607, "tility</w>": 38180, "till": 17462, "till</w>": 4267, "tilla</w>": 26063, "tillerson</w>": 47738, "tilly</w>": 41199, "tilt</w>": 23601, "tim": 1292, "tim</w>": 3863, "timate</w>": 4754, "timb": 26627, "timber": 14441, "timber</w>": 16246, "timberlake</w>": 28274, "timbers</w>": 39911, "timberwolves</w>": 41190, "time": 3764, "time</w>": 788, "timed</w>": 32727, "timehop</w>": 19944, "timel": 
23549, "timelapse</w>": 48154, "timeless</w>": 15558, "timeline</w>": 11492, "timely</w>": 19250, "timeout</w>": 41536, "timer</w>": 19725, "timers</w>": 44574, "times": 26445, "times</w>": 1661, "timesnow</w>": 45487, "timesof": 32522, "timesofindia</w>": 44182, "timetable</w>": 31971, "timeto": 29187, "timing</w>": 13624, "timm": 22444, "timmy</w>": 33252, "timo": 13390, "timo</w>": 33777, "timothy": 42087, "timothy</w>": 18560, "timp": 42166, "tin": 1310, "tin</w>": 5420, "tina</w>": 9257, "tinder</w>": 24287, "tine</w>": 22341, "ting": 7451, "ting</w>": 694, "tinged</w>": 44829, "tings</w>": 35332, "tini</w>": 26839, "tink": 39278, "tinker": 45272, "tinker</w>": 40910, "tino</w>": 20538, "tins</w>": 37359, "tint</w>": 40497, "tinted</w>": 42618, "tiny": 21716, "tiny</w>": 5591, "tio</w>": 27562, "tion": 2274, "tion</w>": 740, "tional": 22460, "tional</w>": 2986, "tionality</w>": 24514, "tionally</w>": 12409, "tionary</w>": 8381, "tione": 44318, "tioned</w>": 9083, "tioning</w>": 15528, "tionist</w>": 25732, "tions</w>": 1371, "tious</w>": 14255, "tip": 15383, "tip</w>": 4623, "tipoff</w>": 44521, "tipp": 32294, "tipped</w>": 31878, "tipper": 38095, "tipperary</w>": 45612, "tipping</w>": 27827, "tips</w>": 3173, "tipton</w>": 48809, "tiptuesday</w>": 42112, "tique</w>": 37772, "tir": 25467, "tir</w>": 38462, "tire": 29128, "tire</w>": 9362, "tired</w>": 6533, "tireless</w>": 39835, "tirelessly</w>": 41548, "tires</w>": 15533, "tiring</w>": 42630, "tiru": 36033, "tis": 7839, "tis</w>": 7394, "tise</w>": 13745, "tisgarh</w>": 40538, "tish": 45148, "tish</w>": 28784, "tism</w>": 27113, "tiss": 28155, "tissue</w>": 15368, "tissues</w>": 32172, "tist</w>": 7902, "tista</w>": 25580, "tists</w>": 25944, "tit": 1991, "tit</w>": 13202, "tita</w>": 40936, "titan": 13496, "titan</w>": 15516, "titanic</w>": 20729, "titanium</w>": 24409, "titans</w>": 13066, "titi": 17434, "titi</w>": 48504, "title": 28033, "title</w>": 3644, "titled</w>": 9939, "titles</w>": 9780, "tito</w>": 26838, "titus</w>": 36102, "tium</w>": 21975, "tiv": 1835, "tiva</w>": 41886, "tive": 14640, "tive</w>": 1420, "tively</w>": 9883, "tiveness</w>": 20955, "tives</w>": 7570, "tivity</w>": 9859, "tivo</w>": 32162, "tix</w>": 5835, "tiz</w>": 19376, "tj": 18890, "tj</w>": 18988, "tk": 22344, "tk</w>": 20676, "tko</w>": 37347, "tks</w>": 38739, "tl": 14325, "tl</w>": 8190, "tland</w>": 30697, "tlap</w>": 41976, "tlc</w>": 22047, "tle": 39141, "tle</w>": 5825, "tles</w>": 39363, "tless</w>": 17427, "tlot</w>": 41080, "tls</w>": 47367, "tly": 37483, "tly</w>": 1646, "tm": 9430, "tm</w>": 7789, "tman</w>": 20796, "tmc</w>": 35263, "tment</w>": 26485, "tml": 39445, "tmltalk</w>": 42260, "tmnt</w>": 32444, "tmobile</w>": 34901, "tmr</w>": 35906, "tmrw</w>": 16496, "tms</w>": 44496, "tmund</w>": 23801, "tmw</w>": 45827, "tmz</w>": 37248, "tn": 3827, "tn</w>": 7248, "tna</w>": 21150, "tnam</w>": 8079, "tner</w>": 34922, "tness</w>": 35212, "tney</w>": 9523, "tng</w>": 35898, "tnt</w>": 20659, "tnx</w>": 38220, "to": 580, "to</w>": 531, "toa": 17916, "toad</w>": 26096, "toast": 24654, "toast</w>": 10920, "toasted</w>": 23533, "toaster</w>": 39061, "toasty</w>": 44726, "tob</w>": 24260, "tobac": 12611, "tobacco</w>": 13905, "tobago</w>": 39482, "tobe": 17534, "tobe</w>": 28740, "tober": 18162, "tober</w>": 2925, "toberfest</w>": 26249, "tobi": 40335, "tobi</w>": 48374, "tobias</w>": 32464, "tobin</w>": 42466, "toby": 29659, "toby</w>": 18333, "toc": 41907, "toc</w>": 30643, "tock</w>": 25274, "tod": 38239, "tod</w>": 33568, "toda</w>": 
47141, "todas</w>": 36150, "today": 11800, "today</w>": 721, "todayin": 32957, "todays</w>": 13513, "todayshow</w>": 29739, "todd": 10398, "todd</w>": 9951, "toddler</w>": 17772, "toddlers</w>": 36719, "toddy</w>": 38926, "todo": 48857, "todo</w>": 23087, "todos</w>": 33355, "toe": 47756, "toe</w>": 11344, "toes</w>": 16511, "tof": 6659, "toff</w>": 27319, "toffee</w>": 34880, "tofficial</w>": 47953, "tofthe": 23678, "toftheday</w>": 20566, "tofu</w>": 24692, "tog</w>": 45715, "toge": 1903, "together": 17858, "together</w>": 1952, "togo</w>": 26729, "tography</w>": 33968, "toh</w>": 26851, "toi": 7472, "toi</w>": 26941, "toid</w>": 49124, "toile": 43148, "toilet</w>": 11071, "toilets</w>": 24027, "toire</w>": 39534, "tok": 16690, "tok</w>": 27010, "token": 32634, "token</w>": 17134, "tokens</w>": 23562, "tokyo": 35038, "tokyo</w>": 6667, "tol": 4678, "tol</w>": 32962, "told</w>": 3527, "tole": 15677, "toledo</w>": 19812, "toler": 12150, "tolerance</w>": 20377, "tolerant</w>": 38536, "tolerate</w>": 35556, "tolkien</w>": 32989, "toll": 44090, "toll</w>": 14155, "tollywood</w>": 42016, "tology</w>": 34799, "tom": 999, "tom</w>": 2435, "toma": 42360, "toma</w>": 44710, "tomas": 35944, "tomas</w>": 27178, "tomat": 12041, "tomato</w>": 9867, "tomatoes</w>": 13004, "tomb": 37187, "tomb</w>": 15582, "tombs</w>": 48613, "tombstone</w>": 45729, "tome": 24137, "tome</w>": 24283, "tomi</w>": 46290, "tomlin</w>": 46649, "tomlinson</w>": 17484, "tommorow</w>": 42871, "tommy": 16573, "tommy</w>": 8876, "tomo": 31223, "tomo</w>": 34434, "tomor": 1277, "tomorrow": 19728, "tomorrow</w>": 1293, "tomorrowland</w>": 34951, "tomorrows</w>": 32258, "tomorrowspaper": 35005, "tomorrowspaperstoday</w>": 35190, "tomp": 43544, "tompkins</w>": 49068, "toms</w>": 10545, "tomy</w>": 18730, "ton": 838, "ton</w>": 917, "tona</w>": 13459, "tone": 32366, "tone</w>": 8408, "toned</w>": 29426, "toner</w>": 40614, "tones</w>": 14744, "tong</w>": 21510, "tonga</w>": 37882, "tongue": 44820, "tongue</w>": 13626, "tongues</w>": 39837, "toni": 17766, "toni</w>": 17171, "tonic</w>": 17808, "tonics</w>": 34647, "tonight</w>": 1009, "tonights</w>": 23312, "tonite</w>": 13449, "tonka</w>": 42781, "tonline</w>": 45867, "tonne</w>": 42450, "tonnes</w>": 24813, "tons</w>": 7555, "tony": 9150, "tony</w>": 4767, "tonyawards</w>": 46068, "too": 1843, "too</w>": 1256, "took</w>": 2280, "tool": 13718, "tool</w>": 5999, "toolbox</w>": 46599, "toolkit</w>": 29849, "tools</w>": 5771, "toom": 27550, "toon": 24664, "toon</w>": 19701, "toonami</w>": 48336, "toons</w>": 35345, "toor": 42590, "tooth": 15316, "tooth</w>": 12030, "toothbrush</w>": 36841, "toothpaste</w>": 37322, "tooting</w>": 42969, "top": 5534, "top</w>": 1253, "topaz</w>": 46125, "tope": 32149, "tope</w>": 42239, "topeka</w>": 46884, "topia</w>": 29618, "topic</w>": 8720, "topical</w>": 37464, "topics</w>": 11916, "topless</w>": 37415, "topo": 23008, "topoli</w>": 30152, "topp": 19529, "topped</w>": 12588, "topper</w>": 31780, "toppers</w>": 41651, "topping</w>": 21071, "toppings</w>": 47554, "topps</w>": 20201, "tops</w>": 8154, "topshop</w>": 40953, "topus</w>": 21495, "tor": 937, "tor</w>": 1208, "tora</w>": 45147, "torah</w>": 37945, "toral</w>": 45282, "torch": 31921, "torch</w>": 15820, "tore": 38066, "tore</w>": 19385, "tored</w>": 38046, "torg</w>": 33214, "tori": 17689, "tori</w>": 17539, "toria</w>": 23732, "torial</w>": 28029, "torian</w>": 48399, "tories</w>": 14193, "torino</w>": 29178, "torio</w>": 34235, "torn": 8572, "torn</w>": 18023, "tornad": 24676, "tornado</w>": 
9062, "tornadoes</w>": 28254, "toro</w>": 17892, "toron": 37407, "toronto": 16866, "toronto</w>": 4514, "torpe": 34093, "torpedo</w>": 46582, "torquay</w>": 45738, "torque</w>": 31940, "torre": 39563, "torre</w>": 38009, "torrent</w>": 42317, "torrential</w>": 41158, "torres</w>": 16049, "tors</w>": 2546, "tortilla</w>": 32683, "torto": 24170, "tortoise</w>": 30178, "torture</w>": 16013, "tortured</w>": 29900, "tory": 29390, "tory</w>": 4214, "tos</w>": 6094, "tosc": 37719, "tose</w>": 38154, "tosh</w>": 17109, "toshi": 31744, "toss</w>": 19656, "tossed</w>": 31296, "tot": 4618, "tot</w>": 23659, "total": 13507, "total</w>": 4445, "totally</w>": 5440, "totals</w>": 25772, "tote": 48145, "tote</w>": 19031, "totem</w>": 45376, "totes</w>": 37199, "tothe": 12222, "toto</w>": 39823, "tots</w>": 24978, "totten": 14360, "tottenham</w>": 14889, "tou": 1879, "tou</w>": 29261, "touch": 9480, "touch</w>": 4526, "touchdown</w>": 18664, "touchdowns</w>": 37905, "touched</w>": 13190, "touches</w>": 14832, "touching</w>": 14088, "touchscreen</w>": 39095, "tough": 12063, "tough</w>": 5499, "tougher</w>": 33722, "toughest</w>": 23773, "toughness</w>": 45522, "toulou": 27145, "toulouse</w>": 30267, "tour": 2710, "tour</w>": 1760, "tourde": 39247, "toured</w>": 27654, "touri": 4224, "touring</w>": 11853, "tourism": 23661, "tourism</w>": 6556, "tourist</w>": 12123, "tourists</w>": 15546, "tournament</w>": 4097, "tournaments</w>": 23058, "tourney</w>": 12603, "tours</w>": 8948, "tous</w>": 37424, "tout</w>": 22300, "touts</w>": 41274, "tov</w>": 28970, "tow": 11557, "tow</w>": 18653, "toward</w>": 8508, "towards</w>": 4447, "towed</w>": 45419, "towel</w>": 15953, "towels</w>": 26578, "tower": 26669, "tower</w>": 4730, "towering</w>": 39444, "towers</w>": 12701, "towie</w>": 44613, "towin</w>": 45819, "towing</w>": 36963, "town": 4068, "town</w>": 1605, "townfc</w>": 33981, "townhall</w>": 33408, "townhouse</w>": 40178, "towns</w>": 14173, "townsend</w>": 26826, "township</w>": 14622, "townsville</w>": 47330, "towork</w>": 48233, "tox": 7742, "tox</w>": 16145, "toxic": 27436, "toxic</w>": 12348, "toxicity</w>": 41234, "toxin</w>": 48899, "toxins</w>": 36618, "toy": 14387, "toy</w>": 5988, "toya</w>": 37602, "toyo": 7644, "toyota</w>": 8908, "toys": 39508, "toys</w>": 7162, "tp": 23760, "tp</w>": 15188, "tpp</w>": 29411, "tps</w>": 35246, "tq</w>": 43066, "tr": 635, "tr</w>": 6337, "tra": 752, "tra</w>": 2483, "trac": 2266, "trace": 48611, "trace</w>": 14767, "traced</w>": 47956, "traces</w>": 30913, "tracey</w>": 25558, "tracing</w>": 27897, "track": 10887, "track</w>": 2700, "tracked</w>": 27049, "tracker</w>": 18123, "tracking</w>": 10428, "tracklist</w>": 39777, "tracks</w>": 7579, "tract</w>": 4690, "traction</w>": 10644, "tractor</w>": 14607, "tractors</w>": 37854, "tracy": 32984, "tracy</w>": 15508, "trad": 48716, "trad</w>": 38037, "trade": 10457, "trade</w>": 3629, "traded</w>": 18860, "trademark</w>": 25011, "trader</w>": 17700, "traders</w>": 19112, "trades</w>": 18519, "trading": 40083, "trading</w>": 6520, "tradio</w>": 20689, "tradition": 20838, "tradition</w>": 8784, "traditional": 41113, "traditional</w>": 5604, "traditionally</w>": 35532, "traditions</w>": 18016, "traf": 3227, "trafal": 32461, "trafalgar</w>": 36969, "traff": 31571, "traffic": 12080, "traffic</w>": 3399, "trafficking</w>": 15983, "trafford</w>": 22912, "trage": 12430, "tragedy</w>": 14082, "tragic</w>": 14828, "tragically</w>": 39599, "trail": 11523, "trail</w>": 4921, "trailblazer</w>": 41015, "trailblazers</w>": 35954, 
"trailer</w>": 4700, "trailers</w>": 24862, "trailing</w>": 37427, "trails</w>": 10633, "train": 9122, "train</w>": 3231, "trained</w>": 10874, "trainee</w>": 25795, "trainees</w>": 30382, "trainer</w>": 9767, "trainers</w>": 18871, "training": 34508, "training</w>": 2199, "trains</w>": 9541, "trait</w>": 35160, "traitor</w>": 31760, "traitors</w>": 42633, "traits</w>": 25748, "trajec": 42042, "trak</w>": 24065, "tral</w>": 14609, "tram": 9800, "tram</w>": 17500, "tramp</w>": 46289, "trampol": 32905, "trampoline</w>": 42800, "tramrahim</w>": 35220, "tran": 1357, "tran</w>": 22031, "trance": 30584, "trance</w>": 18671, "trancefamily</w>": 39630, "trane</w>": 35779, "tranqu": 18912, "tranquil</w>": 35764, "tranquility</w>": 36688, "trans": 1826, "trans</w>": 8126, "transaction</w>": 24881, "transactions</w>": 21653, "transat": 37872, "transatlantic</w>": 40703, "transc": 21073, "transcend": 47087, "transcript</w>": 39008, "transcription</w>": 48765, "transfer": 22659, "transfer</w>": 7134, "transferred</w>": 29700, "transferring</w>": 40924, "transfers</w>": 21621, "transform": 8142, "transform</w>": 12288, "transformation": 34204, "transformation</w>": 7832, "transformational</w>": 47135, "transformationtuesday</w>": 36511, "transformative</w>": 38106, "transformed</w>": 17453, "transformer</w>": 38235, "transformers</w>": 17843, "transforming": 44470, "transforming</w>": 19251, "transforms</w>": 30312, "transgender</w>": 17732, "transi": 32236, "transit</w>": 10174, "transiti": 22939, "transition</w>": 11391, "transitional</w>": 41519, "transitioning</w>": 43586, "transitions</w>": 39374, "transl": 12243, "translate</w>": 22655, "translated</w>": 20752, "translates</w>": 36334, "translating</w>": 42156, "translation</w>": 12153, "translations</w>": 41367, "translator</w>": 36230, "translucent</w>": 49052, "transm": 18861, "transmission</w>": 16103, "transmitted</w>": 48605, "transmitter</w>": 40457, "transp": 11726, "transpa": 18524, "transparen": 16108, "transparency</w>": 16828, "transparent</w>": 19017, "transpl": 16038, "transplant": 41871, "transplant</w>": 18771, "transplantation</w>": 45207, "transpor": 19406, "transport": 10231, "transport</w>": 7362, "transportation</w>": 10911, "transported</w>": 29089, "transporter</w>": 43568, "transporting</w>": 42259, "trap": 36224, "trap</w>": 9677, "trape": 42435, "trapped</w>": 15592, "traps</w>": 28517, "tras</w>": 30638, "trash": 39215, "trash</w>": 9798, "traum": 22263, "trauma</w>": 13846, "traumati": 46613, "traumatic</w>": 29958, "trav": 7586, "trav</w>": 46955, "trave": 35357, "travel": 2824, "travel</w>": 1949, "travelblog</w>": 35957, "travelblogger</w>": 25494, "travelchat</w>": 46455, "traveled</w>": 20384, "traveler</w>": 17794, "travelers</w>": 20644, "travelgram</w>": 40069, "traveling</w>": 9365, "travelled</w>": 23428, "traveller</w>": 22546, "travellers</w>": 29583, "travelling</w>": 11190, "travelphotography</w>": 22808, "travelpics</w>": 32293, "travels</w>": 11472, "traveltips</w>": 36260, "traveltuesday</w>": 16713, "traverse</w>": 35058, "travi": 46971, "travis": 27441, "travis</w>": 12287, "traw": 42288, "trax</w>": 34421, "tray": 38470, "tray</w>": 14621, "trays</w>": 39798, "trc</w>": 41803, "tre": 975, "tre</w>": 6033, "treach": 46005, "tread": 26182, "tread</w>": 35658, "treadmill</w>": 37780, "treas": 8591, "treason</w>": 28103, "treasure</w>": 9922, "treasured</w>": 48068, "treasurer</w>": 26985, "treasures</w>": 16500, "treasury</w>": 20956, "treat": 3968, "treat</w>": 3901, "treated</w>": 9772, 
"treating</w>": 13842, "treatment</w>": 4869, "treatments</w>": 15839, "treats</w>": 8878, "treaty</w>": 19967, "treble</w>": 33194, "trecht</w>": 33812, "tree": 13354, "tree</w>": 2677, "treehouse</w>": 42387, "trees</w>": 4682, "trek": 13236, "trek</w>": 8136, "trekking</w>": 25293, "trell</w>": 35159, "tremb": 44043, "tremend": 14659, "tremendous</w>": 15988, "tren": 2579, "trench</w>": 23846, "trenches</w>": 38723, "trend": 19986, "trend</w>": 6643, "trending</w>": 6087, "trends</w>": 7015, "trendsetter</w>": 46666, "trendy</w>": 23072, "trent": 45885, "trent</w>": 15548, "trenton</w>": 37470, "tres</w>": 23569, "tress</w>": 4733, "tresses</w>": 24273, "trevor": 23437, "trevor</w>": 13219, "trex</w>": 42114, "trey": 36670, "trey</w>": 16939, "tri": 924, "tri</w>": 9618, "triad</w>": 45602, "trial</w>": 5991, "trials</w>": 10992, "triangle</w>": 14615, "triathlon</w>": 18080, "trib</w>": 45151, "tribal</w>": 16629, "tribe": 19943, "tribe</w>": 11365, "tribeca</w>": 35184, "tribes</w>": 26546, "tribu": 3028, "tribun": 14311, "tribunal</w>": 32911, "tribune</w>": 18556, "tribute</w>": 5493, "tributes</w>": 15537, "tric": 9511, "tric</w>": 4081, "trich</w>": 39519, "trick": 17177, "trick</w>": 8172, "tricks</w>": 13177, "tricky</w>": 22319, "trics</w>": 31437, "trident</w>": 35491, "tridge</w>": 18722, "tried</w>": 4554, "tries</w>": 4315, "trife": 48962, "trigge": 30509, "trigger</w>": 16158, "triggered</w>": 30924, "triggers</w>": 37319, "tright</w>": 29915, "tril": 40626, "trill</w>": 39297, "trilli": 39350, "trillion</w>": 20160, "trilo": 15183, "trilogy</w>": 16862, "trim</w>": 14182, "trimmed</w>": 40657, "trin": 6628, "trinidad</w>": 26244, "trinity": 30744, "trinity</w>": 12267, "trio</w>": 10263, "trip": 23421, "trip</w>": 2529, "tripad": 37189, "tripadvisor</w>": 38708, "triple": 16519, "triple</w>": 7673, "triplets</w>": 48601, "tripod</w>": 36141, "tripoli</w>": 40095, "trippin</w>": 43073, "tripping</w>": 35229, "trippy</w>": 35137, "trips</w>": 12292, "tris</w>": 29690, "trish": 40511, "trish</w>": 37179, "trisha</w>": 39152, "tristan</w>": 25497, "trit": 37087, "triton</w>": 45437, "triu": 14782, "trium": 21065, "triumph": 26507, "triumph</w>": 15307, "triumphant</w>": 41918, "trivi": 21228, "trivia</w>": 10642, "triviatuesday</w>": 45499, "trix</w>": 41017, "tro": 1046, "tro</w>": 3332, "trock</w>": 44368, "trojan</w>": 30653, "trojans</w>": 25310, "trol": 10306, "troll": 39737, "troll</w>": 17103, "trolley</w>": 25124, "trolling</w>": 28552, "trolls</w>": 20890, "tromb": 32390, "trombone</w>": 44423, "tron": 19057, "tron</w>": 10684, "tronic</w>": 34258, "tronics</w>": 34397, "troom</w>": 23691, "troop": 12492, "troop</w>": 24054, "trooper</w>": 18327, "troopers</w>": 23576, "troops</w>": 10109, "trop</w>": 31585, "trope</w>": 41150, "trophies</w>": 20998, "trophy": 42676, "trophy</w>": 6502, "tropic": 21794, "tropic</w>": 36736, "tropical": 41699, "tropical</w>": 8686, "tropics</w>": 36940, "tros</w>": 40456, "trose</w>": 36022, "trot</w>": 30453, "trotter</w>": 38287, "trou": 5181, "troubad": 49037, "trouble": 25669, "trouble</w>": 7848, "troubled</w>": 25568, "troubles</w>": 27254, "trough</w>": 39761, "troupe</w>": 34803, "trous": 19727, "trousers</w>": 23172, "trout</w>": 14853, "trove</w>": 45350, "trow</w>": 46914, "troy": 26283, "troy</w>": 12819, "trs</w>": 24770, "tru": 931, "tru</w>": 25326, "truck": 14781, "truck</w>": 4629, "trucker</w>": 45918, "truckers</w>": 43404, "trucking</w>": 26208, "trucks</w>": 9569, "trude</w>": 39017, "trudeau</w>": 15752, "true": 
13096, "true</w>": 2328, "truec": 37583, "truelove</w>": 45711, "truffle</w>": 23064, "truffles</w>": 37057, "truly</w>": 4545, "trum": 11766, "trum</w>": 11399, "truman</w>": 29414, "trump": 9124, "trump</w>": 1797, "trumpet</w>": 23681, "trumpp": 45550, "trumprussia</w>": 39135, "trumps</w>": 29793, "trumptrain</w>": 43595, "trun": 16163, "trun</w>": 46661, "trunk</w>": 18347, "trunks</w>": 38531, "truro</w>": 43507, "truss</w>": 46080, "trust": 17691, "trust</w>": 3876, "truste": 17356, "trusted</w>": 16538, "trustee</w>": 30803, "trustees</w>": 28853, "trusting</w>": 33221, "trusts</w>": 27507, "trustworthy</w>": 46840, "trusty</w>": 37955, "truth": 21335, "truth</w>": 4319, "truths</w>": 27179, "trx</w>": 31620, "try": 4487, "try</w>": 1209, "tryin</w>": 31085, "trying</w>": 2551, "tryna</w>": 15702, "tryout</w>": 43832, "tryouts</w>": 28053, "ts": 2290, "ts</w>": 590, "tsa</w>": 25977, "tsal</w>": 20438, "tsb</w>": 45015, "tsc</w>": 37437, "tsch</w>": 38778, "tsd</w>": 20611, "tse": 49144, "tsfor": 42654, "tsford</w>": 32823, "tsh": 42872, "tshirt</w>": 14907, "tshirts</w>": 29377, "tsi": 40048, "tsi</w>": 37867, "tsk</w>": 43600, "tsla</w>": 35681, "tsm</w>": 43452, "tsman</w>": 20046, "tsn": 44921, "tsn</w>": 26896, "tson": 42353, "tson</w>": 47140, "tsp</w>": 34230, "tsu": 13950, "tsu</w>": 20175, "tsun": 19155, "tsunami</w>": 24286, "tsville</w>": 29080, "tt": 971, "tt</w>": 1402, "tta</w>": 2646, "ttc</w>": 27668, "tte": 23105, "tte</w>": 3070, "tted</w>": 15163, "tten": 11351, "tten</w>": 17479, "tter": 18691, "tter</w>": 5165, "tters</w>": 6318, "ttes</w>": 9293, "tti</w>": 5237, "ttin</w>": 36589, "tting</w>": 1188, "ttino</w>": 47389, "ttip</w>": 46993, "ttle</w>": 9253, "ttm</w>": 46838, "tto": 8759, "tto</w>": 8105, "tton</w>": 10562, "ttot</w>": 12480, "ttp</w>": 30828, "ttr": 47589, "tts</w>": 11570, "ttt</w>": 17256, "tttt</w>": 33119, "ttu</w>": 44006, "ttv</w>": 24281, "tty": 11457, "tty</w>": 1856, "tu": 764, "tu</w>": 5760, "tua</w>": 41344, "tual</w>": 4799, "tuan</w>": 37297, "tub": 34907, "tub</w>": 15450, "tube": 38229, "tube</w>": 3308, "tuber": 30371, "tuberculo": 42606, "tuberculosis</w>": 43129, "tubes</w>": 22870, "tubing</w>": 40794, "tubs</w>": 41705, "tubular</w>": 48786, "tuc": 14456, "tuc</w>": 43871, "tuck</w>": 22398, "tucked</w>": 26923, "tucker": 39703, "tucker</w>": 15726, "tucket</w>": 32677, "tucson</w>": 17250, "tudor</w>": 24547, "tue</w>": 17515, "tues": 2283, "tues</w>": 12113, "tuesday": 10209, "tuesday</w>": 2519, "tuesdaymotivation</w>": 25432, "tuesdays</w>": 23195, "tuesdaythoughts</w>": 17988, "tuf": 44510, "tuff</w>": 38868, "tug": 47032, "tug</w>": 27902, "tuition</w>": 21129, "tuk": 39271, "tuk</w>": 14993, "tul": 9069, "tul</w>": 40837, "tula</w>": 36332, "tulane</w>": 44893, "tulip</w>": 28389, "tulips</w>": 30886, "tulsa</w>": 18850, "tum": 12932, "tum</w>": 8843, "tumb": 8831, "tumble</w>": 38284, "tumbler</w>": 48790, "tumbling</w>": 46226, "tumblr</w>": 11841, "tummy</w>": 26053, "tumor</w>": 22616, "tumors</w>": 39894, "tumour</w>": 45129, "tun": 1415, "tun</w>": 21349, "tuna</w>": 15037, "tundra</w>": 39899, "tune": 11427, "tune</w>": 3300, "tuned</w>": 5898, "tunein</w>": 16809, "tuner</w>": 42905, "tunes": 31688, "tunes</w>": 10810, "tunesapp</w>": 32550, "tung": 47940, "tung</w>": 31092, "tuni": 16270, "tunic</w>": 43495, "tuning</w>": 19585, "tunisia</w>": 23346, "tunnel</w>": 11096, "tunnels</w>": 29814, "tuous</w>": 28738, "tup": 37956, "tup</w>": 4507, "tupac</w>": 31506, "tups</w>": 44855, "tur": 985, "tur</w>": 
17182, "tura</w>": 16127, "tural": 45143, "tural</w>": 4261, "turb": 18973, "turban</w>": 48515, "turbine</w>": 26880, "turbines</w>": 38863, "turbo": 23578, "turbo</w>": 13668, "turbul": 31100, "turbulent</w>": 47871, "ture": 4321, "ture</w>": 941, "tured</w>": 3987, "turer</w>": 11993, "turers</w>": 16956, "tures</w>": 2400, "turf": 36762, "turf</w>": 12510, "turi": 11896, "turin</w>": 36251, "turing</w>": 5812, "turismo</w>": 30202, "turk": 8254, "turk</w>": 32507, "turkey": 35977, "turkey</w>": 4790, "turkeys</w>": 37991, "turkish": 48199, "turkish</w>": 9278, "turks</w>": 34344, "turmeric</w>": 34044, "turmoil</w>": 37751, "turn": 5522, "turn</w>": 2105, "turnaround</w>": 32719, "turnbull</w>": 27863, "turned</w>": 3771, "turner": 42867, "turner</w>": 8777, "turning</w>": 4976, "turno": 21377, "turnout</w>": 11654, "turnover</w>": 30794, "turnpike</w>": 38301, "turns</w>": 3185, "turnt</w>": 28887, "turntable</w>": 37953, "turnup</w>": 30591, "turo</w>": 29224, "turquo": 19390, "turquoise</w>": 19899, "turt": 13716, "turtle": 35943, "turtle</w>": 10912, "turtles</w>": 17862, "tus": 24828, "tus</w>": 7079, "tusc": 17909, "tuscal": 42638, "tuscaloosa</w>": 44375, "tuscan</w>": 42865, "tuscany</w>": 20885, "tuss": 31741, "tut</w>": 35121, "tutor": 10054, "tutor</w>": 27858, "tutorial</w>": 12857, "tutorials</w>": 30973, "tutoring</w>": 37532, "tutti</w>": 46880, "tutu</w>": 35845, "tux": 28720, "tux</w>": 49186, "tuxedo</w>": 40173, "tv": 3197, "tv</w>": 1583, "tvc</w>": 49190, "tvd</w>": 25889, "tvmiaw</w>": 38554, "tvn</w>": 44232, "tvs</w>": 27114, "tvtime</w>": 19947, "tvxq</w>": 43968, "tw": 966, "tw</w>": 12842, "twa</w>": 46954, "twain</w>": 30689, "twal": 48126, "tware</w>": 5707, "twc</w>": 41217, "twd": 29440, "twd</w>": 19343, "twdfamily</w>": 38218, "twe": 18365, "tweak</w>": 48870, "tweaks</w>": 42661, "twee": 1330, "tweed</w>": 26904, "tweeps</w>": 14928, "tweet": 11826, "tweet</w>": 1842, "tweeta": 32024, "tweetapicture": 40596, "tweeted</w>": 7841, "tweeter</w>": 32876, "tweeters</w>": 31713, "tweeting</w>": 8901, "tweets</w>": 3560, "tweetyour": 45033, "twel": 14476, "twelf": 39443, "twelfth</w>": 44072, "twell": 38722, "twell</w>": 30162, "twelve</w>": 19694, "twent": 27027, "twenti": 35167, "twenty</w>": 13016, "twentyon": 39609, "twentyonepilots</w>": 40007, "twer": 13923, "twerk</w>": 28506, "twi": 5537, "twice</w>": 6970, "twick": 34326, "twickenham</w>": 39619, "twil": 12804, "twili": 35754, "twilight": 46366, "twilight</w>": 14512, "twill</w>": 43703, "twin": 9342, "twin</w>": 6769, "twine</w>": 42775, "twinkle</w>": 36545, "twinning</w>": 30156, "twinpeaks</w>": 32042, "twins</w>": 8040, "twist</w>": 10589, "twisted</w>": 18233, "twister</w>": 45933, "twists</w>": 34149, "twit": 1643, "twit</w>": 18704, "twitart</w>": 27709, "twitch": 13251, "twitch</w>": 9153, "twitter": 7546, "twitter</w>": 1989, "twitterkurds</w>": 32722, "twitterstorians</w>": 35389, "two": 17211, "two</w>": 1237, "twol": 31964, "twood": 40404, "twood</w>": 13245, "twp</w>": 33283, "twright</w>": 46778, "twt</w>": 6825, "twx</w>": 26830, "twy": 45861, "tx": 6636, "tx</w>": 5200, "txhsfb</w>": 34757, "txlege</w>": 26995, "txst</w>": 40761, "txt</w>": 24595, "txwx</w>": 22995, "ty": 1260, "ty</w>": 744, "tya</w>": 41273, "tycoon</w>": 36803, "tye</w>": 43097, "tyfree</w>": 41215, "tyga</w>": 41952, "tying</w>": 22559, "tyl</w>": 47537, "tyler": 14787, "tyler</w>": 7058, "tym</w>": 45772, "tyne": 27000, "tyne</w>": 29729, "tyour": 16823, "type": 15673, "type</w>": 3877, "typed</w>": 40753, 
"typeface</w>": 44969, "types</w>": 7543, "typewriter</w>": 42180, "typho": 17486, "typhoon</w>": 21110, "typic": 21648, "typical</w>": 9854, "typically</w>": 23175, "typing</w>": 20102, "typo": 18831, "typo</w>": 29076, "typography</w>": 24332, "tyr": 15590, "tyran": 46921, "tyranny</w>": 35402, "tyre": 38330, "tyre</w>": 16864, "tyres</w>": 21376, "tyrone</w>": 30226, "tyson</w>": 16616, "tz": 7710, "tz</w>": 4983, "tzer</w>": 45267, "tzky</w>": 47127, "tzman</w>": 46032, "tzu</w>": 34354, "té": 27208, "té</w>": 39694, "u": 84, "u</w>": 340, "ua": 34075, "ua</w>": 8441, "uaap": 46753, "uaap</w>": 43774, "uab</w>": 35587, "uae</w>": 9752, "ual</w>": 1921, "ually</w>": 10767, "uan</w>": 33062, "uas</w>": 38339, "uav</w>": 30303, "ub": 18430, "ub</w>": 13494, "uba</w>": 29768, "ubc": 42479, "ubc</w>": 29455, "ube</w>": 30892, "uber": 25896, "uber</w>": 10668, "ubi": 26758, "ubio</w>": 32867, "ubiquit": 48129, "ubis": 28248, "ubisoft</w>": 32051, "ubs</w>": 43851, "ubun": 28184, "ubuntu</w>": 30791, "uc": 4903, "uc</w>": 12438, "uca</w>": 30942, "ucc": 44844, "ucc</w>": 29138, "ucci</w>": 30746, "uccino</w>": 30409, "ucd": 44746, "ucd</w>": 43514, "ucf</w>": 24414, "uch": 19465, "uch</w>": 22394, "uchi": 37473, "uci": 46354, "uci</w>": 28925, "uck</w>": 34189, "ucl": 12013, "ucl</w>": 13647, "ucla": 37667, "ucla</w>": 17259, "ucn</w>": 49036, "uconn</w>": 30549, "ud": 6560, "ud</w>": 5765, "uda</w>": 22800, "udaipur</w>": 49385, "uddin</w>": 43035, "ude": 37016, "ude</w>": 35194, "ue": 16696, "ue</w>": 1190, "uefa</w>": 19189, "uel</w>": 24231, "uer</w>": 45951, "ues</w>": 2526, "uf": 17777, "uf</w>": 19230, "ufc": 20396, "ufc</w>": 6490, "uff</w>": 45701, "ufo</w>": 19443, "ufos</w>": 48234, "ug": 3754, "ug</w>": 16061, "uga</w>": 16056, "ugand": 25965, "uganda</w>": 11125, "ugandan</w>": 44206, "ugby</w>": 30658, "ugh": 39736, "ugh</w>": 12755, "ugliest</w>": 43543, "ugly": 36070, "ugly</w>": 8159, "ugu</w>": 18144, "uh": 17661, "uh</w>": 9219, "uhc</w>": 44974, "uhh</w>": 35938, "uhhh</w>": 45270, "uhm</w>": 35614, "uhur": 29434, "uhuru</w>": 35690, "ui": 17326, "ui</w>": 11458, "uil</w>": 29395, "uit": 30696, "uit</w>": 47584, "uj": 33266, "uji</w>": 39672, "uk": 2294, "uk</w>": 1432, "uka</w>": 23294, "uke": 48836, "uke</w>": 28577, "uked": 48987, "uki": 37435, "uki</w>": 9009, "ukin": 34996, "ukip</w>": 20360, "uklabour</w>": 36902, "ukmfg</w>": 38764, "uko</w>": 33562, "ukone</w>": 24682, "ukrain": 15468, "ukraine</w>": 7768, "ukrainian</w>": 16927, "ukrunchat</w>": 34481, "uku": 29541, "uku</w>": 36082, "ukulele</w>": 39094, "ul": 914, "ul</w>": 6625, "ula": 34104, "ula</w>": 9506, "ular</w>": 4927, "ulary</w>": 21701, "ulate</w>": 20467, "ulation</w>": 32896, "ule</w>": 35616, "ules</w>": 26274, "ulf</w>": 49331, "uli": 41841, "uli</w>": 22174, "ull": 33254, "ulla</w>": 30577, "ullah</w>": 45310, "ullivan</w>": 45252, "ulls</w>": 37418, "ulo": 46084, "ulo</w>": 36738, "ulous": 42490, "ulous</w>": 4281, "ulously</w>": 20167, "ulster": 29709, "ulster</w>": 24639, "ult</w>": 4380, "ulti": 11925, "ulties</w>": 21884, "ultimat": 16522, "ultimate": 34684, "ultimate</w>": 5377, "ultimatefan": 48372, "ultimatefanlive</w>": 48644, "ultimately</w>": 23023, "ultr": 25636, "ultra": 11398, "ultra</w>": 8118, "ultram": 44519, "ultrasound</w>": 29717, "ulture</w>": 22272, "ulty</w>": 8036, "ulu": 41815, "ulu</w>": 15659, "ulum</w>": 17235, "uly": 33220, "ulysses</w>": 46114, "um": 1622, "um</w>": 1008, "uma": 29982, "uma</w>": 9256, "uman</w>": 27112, "umar</w>": 25656, "umass</w>": 39390, 
"umatic</w>": 45006, "umb": 7493, "umber": 19195, "umbrel": 34773, "umbrella</w>": 17143, "umbrellas</w>": 42782, "umbria</w>": 39287, "umc</w>": 39491, "umd</w>": 42067, "ume": 38480, "umen</w>": 42832, "uments</w>": 25924, "umer</w>": 23539, "umes</w>": 21403, "umi": 48772, "umi</w>": 15458, "umich": 41294, "umin</w>": 31542, "umm": 26129, "umm</w>": 21215, "ummer</w>": 47628, "ummm</w>": 33665, "umni</w>": 31739, "ump": 22224, "umpire</w>": 36214, "ums</w>": 8643, "umu</w>": 39788, "un": 569, "un</w>": 2271, "una</w>": 6385, "unable</w>": 17793, "unacceptable</w>": 25234, "unanim": 20800, "unanimous</w>": 33520, "unanimously</w>": 31798, "unanswered</w>": 43611, "unarmed</w>": 41541, "unas</w>": 41366, "unavailable</w>": 48430, "unaware</w>": 33347, "unbeat": 37056, "unbeatable</w>": 40267, "unbeaten</w>": 19228, "unbeliev": 11383, "unbelievable</w>": 13306, "unbelievably</w>": 33781, "unborn</w>": 37257, "unboxing</w>": 32866, "unbreakable</w>": 32956, "unbroken</w>": 49271, "unc": 24921, "unc</w>": 15322, "uncanny</w>": 32556, "uncertain</w>": 30384, "uncertainty</w>": 23956, "unch</w>": 1527, "unchanged</w>": 34272, "uncharted</w>": 34560, "unci": 25521, "unciation</w>": 34117, "uncle": 31537, "uncle</w>": 8002, "unclear</w>": 32955, "uncles</w>": 45335, "uncomfortable</w>": 22470, "uncommon</w>": 34888, "uncondition": 46561, "unconditional</w>": 31112, "unconscious</w>": 34791, "unconstitutional</w>": 43585, "unconventional</w>": 39440, "uncover</w>": 33031, "uncovered</w>": 28234, "uncture</w>": 38736, "uncut</w>": 41056, "und": 9762, "und</w>": 9732, "unda</w>": 39932, "undant</w>": 25377, "unday</w>": 29338, "unde</w>": 45226, "undead</w>": 40105, "undecided</w>": 49368, "undefeated</w>": 15326, "undeni": 38424, "under": 1473, "under</w>": 1798, "underage</w>": 45669, "underattack</w>": 35075, "undercover</w>": 21595, "underdog</w>": 44266, "undere": 21675, "underestim": 23348, "underestimate</w>": 31794, "undergo</w>": 31545, "undergoing</w>": 26419, "undergrad</w>": 38331, "undergraduate</w>": 24320, "underground</w>": 9396, "undering</w>": 30826, "underlying</w>": 31812, "undermine</w>": 42839, "underneath</w>": 20857, "underrated</w>": 19494, "unders</w>": 20376, "understand": 47582, "understand</w>": 4600, "understanding</w>": 7522, "understands</w>": 21607, "understatement</w>": 38296, "understood</w>": 17303, "undertaker</w>": 40144, "undertaking</w>": 49067, "undertale</w>": 48283, "underthe": 41161, "underwater</w>": 14760, "underway</w>": 6273, "underwear</w>": 21154, "underwood</w>": 21474, "underworld</w>": 34760, "undi</w>": 23845, "undisclosed</w>": 39334, "undo</w>": 35454, "undocumented</w>": 35414, "undoub": 38836, "undoubtedly</w>": 42204, "undp</w>": 26691, "une": 4522, "une</w>": 10966, "unearth": 32716, "unearthed</w>": 36632, "unemp": 15139, "unemployed</w>": 32721, "unemployment</w>": 19350, "unes</w>": 6394, "unesco</w>": 16216, "uneven</w>": 43204, "unex": 9484, "unexpe": 10802, "unexpec": 31829, "unexpected</w>": 12293, "unexpectedly</w>": 35622, "unf": 29285, "unfair</w>": 22193, "unfinished</w>": 26526, "unfit</w>": 45367, "unfold</w>": 38681, "unfollow</w>": 38797, "unfor": 14010, "unforgettable</w>": 16173, "unfortun": 10194, "unfortunate</w>": 22361, "unfortunately</w>": 12863, "unfpa</w>": 45048, "ung": 10439, "ung</w>": 4334, "unga</w>": 19151, "ungsoo</w>": 25582, "unh": 25365, "unhappy</w>": 26528, "unhcr</w>": 43451, "unhealthy</w>": 30994, "uni": 1107, "uni</w>": 5926, "unic": 7648, "unicef": 38286, "unicef</w>": 19259, "unicorn</w>": 15660, 
"unicorns</w>": 35183, "unidenti": 33707, "unidentified</w>": 35563, "unification</w>": 45036, "unified</w>": 20876, "uniform</w>": 11075, "uniforms</w>": 17838, "unil": 32388, "unilever</w>": 48654, "uniof": 21218, "union": 14210, "union</w>": 3503, "unions</w>": 18353, "unis": 30482, "unis</w>": 39266, "unisex</w>": 27609, "unison</w>": 46694, "unit": 28522, "unit</w>": 5695, "unite": 15078, "unite</w>": 11305, "uniteblue</w>": 20935, "united": 10898, "united</w>": 2690, "unitedstates</w>": 39636, "unitedway": 47486, "unites</w>": 32061, "uniting</w>": 31318, "units</w>": 10394, "unity": 38300, "unity</w>": 8581, "univ": 36680, "univ</w>": 14896, "univer": 15574, "univers": 5855, "universal": 19148, "universal</w>": 8754, "universe</w>": 6104, "universi": 41692, "universit": 26019, "universities</w>": 16408, "university": 40728, "university</w>": 2182, "universityof": 46158, "unk</w>": 5542, "unknown</w>": 8685, "unl</w>": 43807, "unlawful</w>": 42305, "unle": 19677, "unlea": 23893, "unleash</w>": 26706, "unleashed</w>": 27955, "unless</w>": 10602, "unlike</w>": 16694, "unlikely</w>": 18904, "unlimited</w>": 11015, "unlock</w>": 18649, "unlocked</w>": 16770, "unlocking</w>": 40810, "unlucky</w>": 35029, "unlv</w>": 42283, "unmanned</w>": 36751, "unmatched</w>": 46054, "unn</w>": 38364, "unnamed</w>": 44985, "unnecessary</w>": 24100, "unner</w>": 31481, "unning</w>": 43282, "unnoticed</w>": 42807, "uno": 32446, "uno</w>": 17078, "unofficial</w>": 22506, "unpacking</w>": 43589, "unpaid</w>": 32811, "unparalleled</w>": 44396, "unplugged</w>": 31724, "unpopular</w>": 40232, "unprece": 23054, "unprecedented</w>": 23344, "unpredictable</w>": 38684, "unra": 45150, "unreal": 46980, "unreal</w>": 15636, "unrelated</w>": 38644, "unreleased</w>": 29654, "unrest</w>": 36452, "uns</w>": 25908, "unsafe</w>": 32071, "unsc</w>": 36395, "unseen</w>": 19069, "unsigned</w>": 39346, "unsolved</w>": 40836, "unsplash</w>": 46196, "unstable</w>": 34730, "unstopp": 22105, "unstoppable</w>": 23484, "unsuccessful</w>": 47478, "unsung</w>": 33015, "unsure</w>": 26396, "unt": 19654, "unt</w>": 6537, "until</w>": 1942, "untitled</w>": 21309, "unto</w>": 19801, "untold</w>": 32206, "untouch": 44509, "untouched</w>": 42764, "unused</w>": 29636, "unusual</w>": 12613, "unusually</w>": 36465, "unve": 6685, "unveil</w>": 20483, "unveiled</w>": 13572, "unveiling</w>": 20327, "unveils</w>": 15057, "unwanted</w>": 25285, "unwind</w>": 34064, "unya</w>": 37142, "uo": 30874, "uo</w>": 36162, "uof": 11155, "uoft</w>": 37329, "uon": 48144, "uous</w>": 40185, "up": 1083, "up</w>": 705, "upa</w>": 31727, "upbeat</w>": 39201, "upcoming</w>": 4196, "upcycled</w>": 46552, "upd": 3226, "update</w>": 2491, "updated</w>": 5974, "updates</w>": 4904, "updating</w>": 22792, "uper": 38082, "uper</w>": 33056, "upfront</w>": 42064, "upgrade</w>": 10365, "upgraded</w>": 18577, "upgrades</w>": 21253, "upgrading</w>": 34368, "uph": 14128, "uphill</w>": 42767, "uphol": 26195, "uphold</w>": 43897, "upholstery</w>": 44556, "upl": 41939, "uplift</w>": 45389, "uplifting</w>": 29546, "upload</w>": 13968, "uploaded</w>": 16793, "uploading</w>": 30145, "upon": 23524, "upon</w>": 5067, "upp": 19549, "upp</w>": 45946, "upper": 22465, "upper</w>": 7067, "upri": 15982, "upright</w>": 29818, "uprising</w>": 26006, "upro": 28922, "ups</w>": 6926, "upscale</w>": 47501, "upset</w>": 11214, "upsets</w>": 42637, "upside</w>": 15362, "upstairs</w>": 21387, "upstate</w>": 33335, "upstream</w>": 45517, "upthe": 31510, "upto</w>": 26575, "upton</w>": 31910, 
"uptown</w>": 23807, "upward</w>": 32526, "upwards</w>": 34915, "uq</w>": 39591, "ur": 565, "ur</w>": 1775, "ura": 29337, "ura</w>": 3544, "urable</w>": 40194, "ural": 23547, "ural</w>": 33948, "uran</w>": 16197, "uranium</w>": 29850, "urban": 7931, "urban</w>": 5800, "urbanart</w>": 40834, "urd</w>": 47880, "urday</w>": 19742, "urdu</w>": 29976, "ure": 5514, "ure</w>": 726, "ured</w>": 4210, "urer</w>": 20864, "ures</w>": 2288, "urg</w>": 35995, "urge</w>": 14852, "urged</w>": 23790, "urgency</w>": 47612, "urgent</w>": 13693, "urgently</w>": 34534, "urges</w>": 16692, "urging</w>": 27748, "uri": 11052, "uri</w>": 8699, "urie</w>": 46429, "urin": 45245, "urine</w>": 28864, "uring</w>": 1351, "url</w>": 23464, "urn</w>": 38075, "uro": 17343, "uro</w>": 5925, "urology</w>": 48585, "urope</w>": 14918, "urs</w>": 4794, "urself</w>": 31942, "urst</w>": 19181, "urstruly": 34751, "urstrulymahesh</w>": 35314, "ursula</w>": 38390, "urt</w>": 24309, "uru": 16322, "uru</w>": 11768, "uruguay</w>": 27931, "urus</w>": 14246, "urve": 24583, "ury": 8642, "ury</w>": 2106, "us": 904, "us</w>": 718, "usa": 9491, "usa</w>": 2547, "usability</w>": 46736, "usable</w>": 22890, "usaf</w>": 25017, "usage</w>": 19137, "usaid</w>": 34507, "usair": 36742, "usairforce</w>": 42179, "usarmy</w>": 19132, "usatoday</w>": 40263, "usav": 36056, "usb</w>": 10281, "usc": 13346, "usc</w>": 14995, "uscg</w>": 43932, "usd</w>": 7485, "usda</w>": 25829, "use": 4419, "use</w>": 1483, "used": 32289, "used</w>": 2026, "useful</w>": 9784, "useless</w>": 20154, "usemb": 39700, "user": 21248, "user</w>": 7031, "username</w>": 28162, "users</w>": 7433, "uses</w>": 5282, "useum</w>": 45189, "usf": 32385, "usf</w>": 28942, "usgs</w>": 35103, "ush": 12001, "ush</w>": 18335, "usher</w>": 27411, "ushi</w>": 47734, "usi</w>": 25540, "usic": 34909, "usic</w>": 16753, "using</w>": 1996, "usky</w>": 45778, "usl</w>": 42113, "usm</w>": 40041, "usmc</w>": 21678, "usmnt</w>": 30662, "usn</w>": 40579, "usnavy</w>": 24500, "usnews</w>": 43752, "uso</w>": 21539, "usopen</w>": 21782, "usp": 26651, "usps</w>": 39980, "usrc</w>": 33274, "uss": 11545, "uss</w>": 9260, "ussia</w>": 29553, "ussoccer</w>": 42828, "ussr</w>": 32697, "ust": 35501, "ust</w>": 24725, "usu": 4254, "usu</w>": 40434, "usual</w>": 6129, "usually</w>": 8296, "usur": 45582, "uswnt</w>": 35255, "ut": 1419, "ut</w>": 3641, "uta": 42706, "uta</w>": 25925, "utah": 27474, "utah</w>": 9312, "utc</w>": 18196, "utd</w>": 10493, "ute": 16856, "ute</w>": 3130, "uten": 32089, "uter": 39197, "utes</w>": 2850, "uth": 48819, "uth</w>": 44750, "uti</w>": 24568, "util": 28824, "utili": 17015, "utilities</w>": 27210, "utility</w>": 14941, "utilize</w>": 36861, "utilized</w>": 47604, "utilizing</w>": 40212, "utm</w>": 47853, "utmost</w>": 42352, "uto": 18866, "uto</w>": 13683, "utopia</w>": 34433, "utpol</w>": 42605, "utr": 48726, "utrecht</w>": 37216, "uts</w>": 11740, "utsa</w>": 37528, "utt": 17096, "uttar</w>": 40168, "uttarak": 33755, "uttarakhand</w>": 35655, "utter": 18769, "utter</w>": 24558, "utterly</w>": 21353, "utto</w>": 42183, "utv</w>": 36351, "utz</w>": 45320, "uu": 5702, "uu</w>": 14553, "uuu": 44355, "uuu</w>": 27656, "uuuu": 16720, "uuuu</w>": 40797, "uv": 23777, "uv</w>": 15977, "uva</w>": 23908, "uw": 13933, "uw</w>": 19166, "uwe</w>": 48785, "uwu</w>": 35544, "ux": 9251, "ux</w>": 6213, "uy": 31929, "uy</w>": 48113, "uz": 19398, "uz</w>": 36991, "uzbe": 43007, "uzbekistan</w>": 45024, "uzzi</w>": 48210, "v": 85, "v</w>": 341, "va": 4648, "va</w>": 1892, "vaa</w>": 37488, 
"vable</w>": 23088, "vac": 3125, "vac</w>": 34085, "vaca</w>": 48215, "vacancies</w>": 26333, "vacancy</w>": 21247, "vacant</w>": 25262, "vacation": 28336, "vacation</w>": 6561, "vacations</w>": 29002, "vacay</w>": 44716, "vacc": 13342, "vaccin": 19164, "vaccinated</w>": 48134, "vaccination</w>": 32518, "vaccine": 47780, "vaccine</w>": 17493, "vaccines</w>": 25860, "vach": 46211, "vacu": 16058, "vacuum</w>": 18420, "vad": 11880, "vada</w>": 46759, "vader</w>": 21908, "vae</w>": 39384, "vag": 13015, "vague</w>": 42154, "vah</w>": 26921, "vai": 26893, "vai</w>": 36802, "vail</w>": 21189, "vain</w>": 25538, "vais</w>": 28719, "vaj": 34206, "vak": 16288, "vak</w>": 41597, "val": 1214, "val</w>": 1560, "vala</w>": 48525, "valdez</w>": 40617, "vale": 35554, "vale</w>": 10820, "valedic": 43525, "valen": 12630, "valence</w>": 30225, "valenci": 34183, "valencia</w>": 16559, "valent": 3655, "valent</w>": 15300, "valentin</w>": 48631, "valentina</w>": 43741, "valentine": 11208, "valentine</w>": 5876, "valentines</w>": 10259, "valentinesday</w>": 12369, "valentino</w>": 29624, "valeri": 31951, "valerie</w>": 25592, "valet</w>": 45749, "vali": 8230, "valiant</w>": 33804, "valid</w>": 15126, "validation</w>": 32536, "valkyrie</w>": 42326, "vall": 23523, "vall</w>": 35295, "vallarta</w>": 47874, "valle": 24857, "valle</w>": 29105, "valley": 18354, "valley</w>": 3136, "valleys</w>": 28649, "valor</w>": 30930, "vals</w>": 7431, "valu": 6291, "valuable</w>": 10056, "valuation</w>": 25894, "value": 41358, "value</w>": 4602, "valued</w>": 17801, "values</w>": 8857, "valve</w>": 17001, "valves</w>": 33517, "vam": 9983, "vamo": 46718, "vamos</w>": 30346, "vamp": 10680, "vampi": 47017, "vampire": 47576, "vampire</w>": 13220, "vampires</w>": 30868, "vamps</w>": 44810, "van": 2446, "van</w>": 2451, "vana</w>": 20543, "vanc": 6320, "vance</w>": 31447, "vancou": 6750, "vancouver": 31904, "vancouver</w>": 7208, "vand": 11691, "vandalism</w>": 45664, "vander": 16264, "vanderbilt</w>": 33524, "vandy": 39268, "vane</w>": 43828, "vaness": 13328, "vanessa</w>": 16836, "vangogh</w>": 47849, "vanguard</w>": 27916, "vani": 15396, "vani</w>": 26459, "vania</w>": 10998, "vanilla</w>": 11974, "vanished</w>": 43783, "vanishing</w>": 48296, "vanity": 48353, "vanity</w>": 22938, "vans</w>": 11711, "vant": 26298, "vantage</w>": 31749, "vanu": 42892, "vanuatu</w>": 48766, "vap": 10462, "vape": 25423, "vape</w>": 20219, "vaping</w>": 29403, "vapor": 37167, "vapor</w>": 30729, "vapori": 46183, "var": 3187, "var</w>": 12998, "vara</w>": 47492, "varan": 36585, "varanasi</w>": 39364, "vard": 21866, "vard</w>": 8773, "vardy</w>": 47371, "vare": 38159, "vares</w>": 42895, "vargas</w>": 32752, "vari": 3354, "variable</w>": 26416, "varian</w>": 34334, "variant</w>": 20293, "variants</w>": 38312, "variation</w>": 26420, "variations</w>": 29025, "varied</w>": 32334, "varies</w>": 32543, "varieties</w>": 23805, "variety</w>": 8396, "various</w>": 7395, "varsity": 43716, "varsity</w>": 8574, "varun": 48120, "varun</w>": 22069, "vary</w>": 18855, "varying</w>": 36456, "vas": 5669, "vas</w>": 5995, "vasc</w>": 40995, "vascular</w>": 19218, "vase</w>": 20431, "vasi": 49092, "vast": 24413, "vast</w>": 16414, "vastly</w>": 48257, "vat": 11588, "vat</w>": 18363, "vatican</w>": 21030, "vation</w>": 37884, "vau": 6391, "vaugh": 25158, "vaughan</w>": 21392, "vaughn</w>": 29013, "vaul": 27469, "vault</w>": 15240, "vaus</w>": 40217, "vaux": 27403, "vauxhall</w>": 29173, "vaw</w>": 47952, "vay": 48000, "vaz": 38142, "vb": 29365, "vb</w>": 8778, "vball</w>": 
38329, "vc": 28670, "vc</w>": 7952, "vcs</w>": 43528, "vcu</w>": 40102, "vd</w>": 9515, "vday</w>": 42055, "ve": 673, "ve</w>": 563, "vea</w>": 43798, "veal</w>": 36616, "veau</w>": 24419, "vec": 19912, "vector": 40453, "vector</w>": 21533, "ved": 19515, "ved</w>": 1102, "veda</w>": 44401, "vedere</w>": 45660, "vedi</w>": 47971, "vee": 35708, "vee</w>": 17073, "veen</w>": 22432, "veer": 21243, "veer</w>": 22058, "veg": 9048, "veg</w>": 16460, "vega</w>": 22930, "vegan": 15705, "vegan</w>": 5615, "vegans</w>": 48514, "vegas": 20288, "vegas</w>": 4413, "vege": 6219, "vegetable</w>": 15725, "vegetables</w>": 14119, "vegetarian</w>": 14600, "vegetation</w>": 33947, "veggie</w>": 19401, "veggies</w>": 16767, "vehic": 3973, "vehicle</w>": 5299, "vehicles</w>": 8361, "veil</w>": 23516, "vein</w>": 29169, "veins</w>": 28867, "veit</w>": 30620, "vel": 942, "vel</w>": 1287, "vela</w>": 34898, "veld</w>": 34011, "veled</w>": 15370, "veli</w>": 49166, "veling</w>": 37970, "vell": 21173, "vell</w>": 32997, "velo": 14357, "velo</w>": 33850, "velocity</w>": 23811, "vels</w>": 5109, "velve": 37849, "velvet</w>": 11063, "vely</w>": 1708, "vember</w>": 3477, "vement</w>": 3129, "vements</w>": 11104, "ven": 1240, "ven</w>": 1638, "vena</w>": 47442, "vend": 10851, "vending</w>": 29202, "vendor</w>": 21261, "vendors</w>": 20353, "vene": 5365, "veness</w>": 10516, "venetian</w>": 34336, "venezia</w>": 34139, "venezu": 10939, "venezuela</w>": 12839, "venezuelan</w>": 34699, "veng</w>": 31526, "venge": 27757, "vengeance</w>": 32057, "veni</w>": 31142, "venice</w>": 11010, "vening</w>": 47532, "venison</w>": 40037, "venom": 42491, "venom</w>": 21588, "vens</w>": 20884, "vent": 4373, "vent</w>": 5687, "ventil": 39522, "ventilation</w>": 35066, "venting</w>": 15731, "vention</w>": 4122, "vents</w>": 12833, "ventu": 48217, "ventura</w>": 20921, "venture": 37046, "venture</w>": 12543, "ventures</w>": 20829, "venue</w>": 5097, "venues</w>": 18120, "venus</w>": 14691, "ver": 624, "ver</w>": 667, "vera</w>": 13350, "verage</w>": 3725, "verb</w>": 34952, "verbal</w>": 26522, "verbally</w>": 39985, "verbs</w>": 45687, "verde</w>": 16935, "verdi</w>": 42306, "verdict</w>": 18030, "vere": 11135, "vere</w>": 34707, "vered</w>": 2868, "verge</w>": 23913, "veri": 11638, "verification</w>": 33521, "verified</w>": 22555, "verify</w>": 34722, "vering</w>": 4630, "veriz": 19707, "verizon</w>": 21532, "verma</w>": 41261, "vermont</w>": 19241, "vern": 2214, "vern</w>": 12586, "verne</w>": 45553, "vernon</w>": 18348, "vero": 45217, "vero</w>": 38208, "verona</w>": 31819, "veronic": 39551, "veronica</w>": 24039, "vers": 1219, "vers</w>": 2094, "versa</w>": 35765, "versace</w>": 25422, "versail": 29857, "versailles</w>": 32129, "versary</w>": 2940, "versatile</w>": 18110, "versatility</w>": 41340, "verse": 39466, "verse</w>": 3131, "verses</w>": 30769, "versi": 8934, "version</w>": 3273, "versions</w>": 16190, "versity</w>": 1906, "verst": 42484, "verstappen</w>": 45064, "versus</w>": 14548, "versy</w>": 18522, "vert</w>": 11742, "verte": 35158, "verted</w>": 48173, "verti": 30459, "vertical</w>": 14293, "vertigo</w>": 42477, "verton</w>": 40632, "verts</w>": 37265, "very": 11698, "very</w>": 1070, "veryday</w>": 37944, "verything</w>": 45174, "ves": 9616, "ves</w>": 1003, "vesmatter</w>": 47636, "vespa</w>": 46029, "vessel</w>": 16387, "vessels</w>": 22822, "vest": 31657, "vest</w>": 12473, "vesti": 40349, "vests</w>": 41906, "vet": 12294, "vet</w>": 5951, "veter": 4330, "veteran": 20797, "veteran</w>": 8814, "veterans</w>": 7092, 
"veteransday</w>": 26409, "veterin": 43959, "veterinary</w>": 25458, "veto</w>": 36570, "vets</w>": 13113, "vette</w>": 17045, "vettel</w>": 28700, "vevo</w>": 35141, "vex": 36187, "vex</w>": 43978, "vey": 34792, "vey</w>": 3884, "vez": 35987, "vez</w>": 17226, "vf</w>": 25966, "vfl</w>": 33726, "vfx</w>": 30149, "vg": 40591, "vg</w>": 22346, "vh": 46953, "vh</w>": 23847, "vhs</w>": 21932, "vi": 603, "vi</w>": 4259, "via</w>": 1048, "viable</w>": 25752, "viadu": 37012, "viaduct</w>": 39113, "vial</w>": 39951, "vian": 40487, "vian</w>": 16124, "vibe": 37974, "vibe</w>": 12813, "vibes</w>": 7764, "vibr": 9527, "vibrant</w>": 14270, "vibration</w>": 37456, "vibrations</w>": 43660, "vic": 1555, "vic</w>": 4412, "vica</w>": 46168, "vicar</w>": 43899, "vice": 43572, "vice</w>": 6931, "vicente</w>": 39411, "vices</w>": 8332, "vich</w>": 24143, "vici": 46670, "vicious</w>": 25177, "vick": 15116, "vick</w>": 29704, "vickers</w>": 48452, "vicki</w>": 34927, "vicky": 37176, "vicky</w>": 25788, "victi": 6861, "victim</w>": 9133, "victims</w>": 7131, "victor": 2423, "victor</w>": 10690, "victori": 17555, "victoria": 39286, "victoria</w>": 6127, "victorian</w>": 12350, "victorias": 47791, "victories</w>": 24577, "victorious</w>": 24033, "victory": 36668, "victory</w>": 4127, "vid": 17233, "vid</w>": 9284, "vida</w>": 19015, "vidal</w>": 36678, "vide": 1334, "vide</w>": 45244, "video": 9478, "video</w>": 1455, "videogame</w>": 35097, "videogames</w>": 21149, "videos</w>": 6081, "vids</w>": 23035, "vidy": 29639, "vidya</w>": 45264, "vie": 922, "vie</w>": 8538, "vien": 36493, "vienna</w>": 12670, "vier": 15352, "vier</w>": 11987, "viera</w>": 21114, "viernes</w>": 33826, "vies</w>": 22458, "viest</w>": 31979, "viet": 17558, "viet</w>": 13128, "vietnam": 19558, "vietnam</w>": 8623, "vietnamese</w>": 22382, "view": 12004, "view</w>": 1093, "viewed</w>": 7226, "viewer</w>": 15061, "viewers</w>": 14275, "viewing</w>": 7124, "viewpoint</w>": 41604, "views</w>": 2758, "vig": 8549, "vig</w>": 45083, "vigil": 21538, "vigil</w>": 19896, "vigilant</w>": 43026, "vigne": 40447, "vigne</w>": 34581, "vigo</w>": 44097, "vigor": 26781, "vii</w>": 17759, "viii</w>": 20414, "vijay": 12014, "vijay</w>": 10823, "vijaysethu": 47966, "vik": 10764, "vik</w>": 17181, "vika</w>": 39562, "vikas</w>": 37116, "viking": 26663, "viking</w>": 15897, "vikings</w>": 11713, "vikram": 41136, "vikram</w>": 24314, "viktor</w>": 36101, "vil": 1338, "vil</w>": 3000, "vila</w>": 37505, "vile</w>": 27247, "vill": 10481, "vill</w>": 45698, "villa": 3203, "villa</w>": 7754, "village": 34584, "village</w>": 4331, "villagers</w>": 34283, "villages</w>": 17621, "villain</w>": 15425, "villains</w>": 25271, "villanova</w>": 44025, "villar": 35164, "villas</w>": 28907, "ville": 11110, "ville</w>": 1930, "villen": 46177, "villi": 36907, "vimeo</w>": 48720, "vin": 1379, "vin</w>": 2558, "vina</w>": 35682, "vinai": 37396, "vinaigrette</w>": 39876, "vinay": 43952, "vince": 32429, "vince</w>": 6236, "vincen": 33402, "vincent": 29069, "vincent</w>": 10357, "vinci</w>": 30199, "vind</w>": 20275, "vindic": 39582, "vine": 8471, "vine</w>": 7721, "vinegar</w>": 23834, "vines</w>": 21268, "vineyard</w>": 16527, "vineyards</w>": 23082, "ving": 5375, "ving</w>": 903, "vingne</w>": 42579, "vings</w>": 22510, "vini</w>": 48119, "vinnie</w>": 40885, "vinny</w>": 36794, "vino</w>": 14509, "vinod</w>": 43348, "vins</w>": 34820, "vinson</w>": 45945, "vintag": 10936, "vintage": 13654, "vintage</w>": 3266, "viny": 40990, "vinyl": 22835, "vinyl</w>": 5754, "vio": 11913, 
"vio</w>": 20324, "viol": 3164, "viola</w>": 27438, "violate</w>": 44875, "violated</w>": 38192, "violating</w>": 37554, "violation</w>": 22919, "violations</w>": 21969, "violence</w>": 5450, "violent</w>": 11565, "violently</w>": 47758, "violet</w>": 16118, "violets</w>": 42861, "violin</w>": 17058, "violinist</w>": 36299, "vion</w>": 35496, "vious</w>": 6418, "viously</w>": 7149, "vip": 45714, "vip</w>": 7111, "viper</w>": 27401, "vips</w>": 41149, "vir": 1790, "vir</w>": 25319, "vira</w>": 35910, "viral</w>": 11653, "virat</w>": 32473, "virgil</w>": 39076, "virgin": 5651, "virgin</w>": 12103, "virgini": 43426, "virginia</w>": 6728, "virgo</w>": 39978, "viro": 32301, "viron": 38309, "virtu": 7977, "virtual": 18059, "virtual</w>": 7790, "virtually</w>": 22475, "virtualreality</w>": 32608, "virtue</w>": 26860, "virtues</w>": 42167, "virtuoso</w>": 47027, "virus</w>": 11808, "viruses</w>": 34830, "vis": 1301, "vis</w>": 5337, "visa</w>": 12802, "visas</w>": 41228, "vise</w>": 24977, "vised</w>": 14810, "vish": 12024, "vish</w>": 29124, "vishal</w>": 33648, "vishnu</w>": 37816, "visi": 1409, "visibility</w>": 15921, "visible": 36658, "visible</w>": 8626, "vising</w>": 37439, "vision": 11147, "vision</w>": 2515, "visional</w>": 24627, "visionary</w>": 22959, "visions</w>": 13804, "visit": 3388, "visit</w>": 1600, "visitation</w>": 44370, "visited</w>": 5580, "visiting</w>": 4680, "visitor</w>": 13881, "visitors</w>": 9160, "visits</w>": 8489, "visitscotland</w>": 28760, "visitspain</w>": 48860, "vism</w>": 15514, "viso</w>": 46732, "visor</w>": 24217, "vist</w>": 21436, "vista</w>": 13865, "visu": 7739, "visual": 17004, "visual</w>": 7195, "visualization</w>": 28500, "visualize</w>": 45057, "visually</w>": 25743, "visuals</w>": 21315, "viswas": 36513, "viswasam</w>": 47664, "vit": 4056, "vit</w>": 35580, "vita</w>": 15700, "vital": 32525, "vital</w>": 10585, "vitality</w>": 36385, "vitam": 9856, "vitamin</w>": 13675, "vitamins</w>": 22582, "vito</w>": 36725, "vity</w>": 4893, "vitz</w>": 26188, "vius</w>": 41571, "viv": 21827, "viv</w>": 35363, "viva</w>": 17399, "vival": 35920, "vive": 18980, "vive</w>": 24004, "vivek</w>": 36243, "vivi": 11625, "vivian</w>": 30129, "vivid</w>": 22984, "vivo": 28091, "vivo</w>": 25888, "vix": 28976, "vix</w>": 34811, "vixen</w>": 38757, "vixx</w>": 32106, "viz": 28251, "viz</w>": 31786, "vj": 45439, "vj</w>": 30827, "vk</w>": 41893, "vl": 37580, "vl</w>": 36442, "vla": 23686, "vlad</w>": 41089, "vladi": 19320, "vladimir</w>": 21702, "vlive</w>": 46797, "vlog</w>": 18894, "vm": 16204, "vm</w>": 20269, "vma</w>": 35666, "vmas</w>": 30236, "vmware</w>": 29615, "vn": 47098, "vn</w>": 25076, "vo": 947, "vo</w>": 3951, "voc": 4105, "voc</w>": 20855, "vocab": 21346, "vocabulary</w>": 23804, "vocal": 34037, "vocal</w>": 13147, "vocali": 19134, "vocalist</w>": 22102, "vocals</w>": 17666, "vocation</w>": 20521, "vocational</w>": 33751, "vod": 11820, "vod</w>": 35854, "vodaf": 28436, "vodafone</w>": 38695, "vodka</w>": 13646, "vogel</w>": 44960, "vogue": 24418, "vogue</w>": 13178, "voic": 29185, "voice": 13179, "voice</w>": 3386, "voiced</w>": 34352, "voiceof": 44966, "voiceover</w>": 41979, "voices</w>": 9144, "void</w>": 21561, "voip</w>": 42762, "voir</w>": 16036, "vol": 1343, "vol</w>": 7945, "volatile</w>": 41022, "volatility</w>": 32355, "volcan": 9916, "volcanic</w>": 24072, "volcano</w>": 14581, "volcanoes</w>": 38055, "voli</w>": 40138, "volk": 13432, "volkswag": 14407, "volkswagen</w>": 15342, "volley": 7130, "volley</w>": 34656, "volleyball</w>": 7458, 
"volo</w>": 44791, "vols</w>": 20404, "volt</w>": 26430, "volta": 29879, "volta</w>": 33480, "voltage</w>": 23118, "voltron</w>": 39314, "volu": 3563, "volume</w>": 8284, "volumes</w>": 22651, "volun": 3356, "voluntar": 48823, "voluntary</w>": 23815, "volunte": 3556, "volunteer": 32331, "volunteer</w>": 7114, "volunteered</w>": 34000, "volunteering</w>": 14902, "volunteers</w>": 5939, "volution</w>": 24043, "volved</w>": 42888, "volvo": 39991, "volvo</w>": 16906, "vom": 24198, "vomit</w>": 46485, "von": 11269, "von</w>": 8497, "voo": 19497, "voodoo</w>": 26869, "voor": 34291, "voor</w>": 34464, "vor": 8338, "vor</w>": 5308, "vore</w>": 18215, "vortex</w>": 30071, "vos</w>": 16863, "vot</w>": 48558, "vote": 6830, "vote</w>": 2187, "voted</w>": 6454, "votel": 41379, "voter": 44474, "voter</w>": 14065, "voters</w>": 8925, "votes</w>": 6693, "voting</w>": 5756, "vou": 11045, "voucher</w>": 18190, "vouchers</w>": 23384, "vous</w>": 10636, "vow</w>": 34787, "vows</w>": 21677, "vox": 29215, "vox</w>": 22692, "voy": 10622, "voy</w>": 15021, "voyage</w>": 16299, "voyager</w>": 29669, "vp": 32758, "vp</w>": 3896, "vpn</w>": 38212, "vr": 16840, "vr</w>": 5921, "vre": 44500, "vre</w>": 17501, "vs": 11385, "vs</w>": 1547, "vsco": 26752, "vsco</w>": 32822, "vscocam</w>": 34694, "vsky</w>": 37791, "vss</w>": 31919, "vt": 31732, "vt</w>": 10291, "vu": 8664, "vu</w>": 13230, "vue": 43915, "vue</w>": 19313, "vuel": 31312, "vuelta</w>": 43856, "vuitton</w>": 26705, "vul": 6856, "vulcan</w>": 34767, "vulner": 11213, "vulnerability</w>": 28797, "vulnerable</w>": 14332, "vulture</w>": 34593, "vultures</w>": 47197, "vv": 19264, "vv</w>": 35686, "vw": 28650, "vw</w>": 13250, "vx</w>": 47644, "vy": 11566, "vy</w>": 5157, "w": 86, "w</w>": 342, "wa": 869, "wa</w>": 2663, "waa</w>": 35874, "wab": 19893, "wab</w>": 36852, "wac": 27445, "wac</w>": 37947, "wack": 22880, "wack</w>": 38270, "wacky</w>": 34318, "waco</w>": 36035, "wad": 11133, "wad</w>": 30451, "wada</w>": 40006, "wade": 40237, "wade</w>": 14180, "wadi</w>": 37253, "waf": 17638, "wafc</w>": 49086, "waff": 13940, "waffle</w>": 20375, "waffles</w>": 24205, "wag": 5764, "wag</w>": 19177, "wage</w>": 10716, "wager</w>": 43430, "wages</w>": 19114, "wagner</w>": 18081, "wagon</w>": 13260, "wagons</w>": 47944, "wags</w>": 48580, "wah": 24812, "wah</w>": 18014, "wahl": 27500, "wahlberg</w>": 35151, "wahoo</w>": 47995, "wai": 11469, "wai</w>": 21569, "waifu</w>": 46551, "waikiki</w>": 44907, "wain": 28358, "wain</w>": 20120, "wainwright</w>": 45878, "waist": 36946, "waist</w>": 18459, "wait": 10021, "wait</w>": 1885, "waite</w>": 24272, "waited</w>": 18492, "waiter</w>": 32946, "waitin</w>": 44482, "waiting</w>": 2680, "waitress</w>": 39760, "waitrose</w>": 37164, "waits</w>": 21361, "waiver</w>": 42866, "waj</w>": 49367, "wak": 11172, "wak</w>": 36015, "waka</w>": 42696, "wake": 10501, "wake</w>": 5731, "wakefield</w>": 26358, "wakes</w>": 29108, "wakeup": 26328, "wakeup</w>": 35380, "wakeupamerica</w>": 37474, "waking</w>": 13025, "wal": 1056, "wal</w>": 6903, "wala</w>": 16468, "walang</w>": 49180, "walcott</w>": 45744, "wald": 46930, "wald</w>": 15724, "walden</w>": 39311, "waldo</w>": 32440, "waldorf</w>": 38227, "wale": 41247, "wale</w>": 20336, "wales": 25383, "wales</w>": 5110, "walgreens</w>": 38490, "wali": 37576, "wali</w>": 14768, "walia</w>": 44455, "walk": 8588, "walk</w>": 2374, "walkaway</w>": 48255, "walked</w>": 8667, "walker": 24735, "walker</w>": 6150, "walkers</w>": 23366, "walkin</w>": 45792, "walking": 12644, "walking</w>": 3941, 
"walkingdead</w>": 14948, "walkout</w>": 47470, "walks</w>": 8192, "walkway</w>": 36614, "wall": 4316, "wall</w>": 2569, "walla": 26007, "walla</w>": 39982, "wallabies</w>": 48926, "wallace</w>": 12535, "wallart</w>": 36223, "walled</w>": 36567, "waller</w>": 45340, "wallet</w>": 12154, "wallets</w>": 38550, "walleye</w>": 49099, "wallis</w>": 42206, "wallpaper</w>": 10560, "wallpapers</w>": 29841, "walls</w>": 8258, "wallstreet</w>": 45341, "wally</w>": 26024, "walmart</w>": 11972, "walnut</w>": 16310, "walnuts</w>": 38294, "walsall</w>": 42935, "walsh</w>": 12856, "walt": 23535, "walt</w>": 14312, "waltdisneyworld</w>": 36505, "walter": 31156, "walter</w>": 10645, "walters</w>": 25532, "waltham": 42742, "waltham</w>": 45581, "walton</w>": 19485, "waltz</w>": 35982, "wam": 20503, "wamy</w>": 46970, "wan": 2060, "wan</w>": 4557, "wana</w>": 30830, "wand": 14636, "wand</w>": 28559, "wanda</w>": 25070, "wander": 12985, "wander</w>": 24473, "wandered</w>": 46593, "wanderers</w>": 27540, "wandering</w>": 22597, "wanderlust</w>": 16129, "wane</w>": 27459, "wang": 19731, "wang</w>": 11900, "wani</w>": 21674, "wankers</w>": 42189, "wann": 23622, "wanna": 35940, "wanna</w>": 3836, "wannabe</w>": 40730, "wannaone</w>": 44832, "want": 18356, "want</w>": 1280, "wanted</w>": 3146, "wanting</w>": 12801, "wants</w>": 3107, "wap": 27393, "wap</w>": 30368, "waq": 47512, "war": 984, "war</w>": 2238, "wara</w>": 21631, "warbler</w>": 33891, "warcraft</w>": 13660, "ward": 7728, "ward</w>": 1460, "warden</w>": 27798, "wardly</w>": 30780, "wardro": 14247, "wardrobe</w>": 15020, "wards</w>": 2593, "ware": 7416, "ware</w>": 4476, "wareagle</w>": 35716, "warehouse</w>": 13054, "wareness": 41601, "wareness</w>": 35870, "wares</w>": 30692, "warfare</w>": 15739, "warhammer</w>": 26832, "warhol</w>": 27554, "wari</w>": 20977, "wark": 46346, "wark</w>": 15164, "warlock</w>": 42455, "warm": 14725, "warm</w>": 3616, "warmed</w>": 36695, "warmer</w>": 14328, "warmest</w>": 30910, "warming</w>": 8606, "warmly</w>": 45322, "warmongers</w>": 33205, "warms</w>": 32917, "warmth</w>": 19636, "warmup</w>": 29904, "warmups</w>": 44094, "warn</w>": 19360, "warned</w>": 16409, "warner": 28564, "warner</w>": 13402, "warning</w>": 4994, "warnings</w>": 18098, "warns</w>": 14086, "waron": 38947, "warp</w>": 32411, "warped</w>": 32125, "warran": 17392, "warrant</w>": 22554, "warrants</w>": 45677, "warranty</w>": 23999, "warren": 23143, "warren</w>": 9234, "warri": 4109, "warrington</w>": 31203, "warrior": 18998, "warrior</w>": 8148, "warriors</w>": 6421, "wars</w>": 3931, "warsaw</w>": 21072, "warship</w>": 47846, "wart": 43535, "wart</w>": 7346, "wartime</w>": 42998, "warts</w>": 21781, "warwick": 23081, "warwick</w>": 22215, "warwickshire</w>": 36766, "wary</w>": 36213, "was": 3398, "was</w>": 739, "wasabi</w>": 47334, "wash": 3363, "wash</w>": 7810, "washed</w>": 14092, "washer</w>": 24085, "washes</w>": 38950, "washing</w>": 13029, "washington": 16774, "washington</w>": 4365, "washingtondc</w>": 40225, "washingtonpost</w>": 28426, "wasn</w>": 5044, "wasnt</w>": 29607, "wasp</w>": 24889, "wasps</w>": 35300, "wassup</w>": 45708, "wast": 28886, "waste": 18157, "waste</w>": 6065, "wasted</w>": 18278, "wasteland</w>": 44035, "wastewater</w>": 34463, "wasting</w>": 25577, "wat": 800, "wat</w>": 10621, "wata</w>": 42509, "watch": 7046, "watch</w>": 1239, "watchdog</w>": 35303, "watched</w>": 5775, "watcher</w>": 35971, "watchers</w>": 28443, "watches</w>": 9521, "watchin</w>": 32432, "watching</w>": 2113, "water": 2505, "water</w>": 
1573, "watercolor</w>": 14211, "watercolour</w>": 18377, "waterfall</w>": 16403, "waterfalls</w>": 26692, "waterford</w>": 24448, "waterfront</w>": 16605, "waterhouse</w>": 45072, "watering</w>": 19871, "waterloo</w>": 17465, "watermelon</w>": 19889, "waterproof</w>": 17613, "waters</w>": 7753, "watershed</w>": 33204, "waterstones</w>": 45014, "waterways</w>": 37395, "watford</w>": 23162, "watfordfc</w>": 37328, "wati</w>": 27966, "watkins</w>": 22539, "watson": 35490, "watson</w>": 9294, "watt": 22899, "watt</w>": 15805, "wattpad</w>": 32351, "watts</w>": 14750, "wau": 9479, "wav": 6054, "wave": 17530, "wave</w>": 4535, "waved</w>": 44657, "waver": 25997, "waves</w>": 7882, "waving</w>": 26545, "wavy</w>": 31941, "waw": 22039, "wawrinka</w>": 48414, "wawx</w>": 47387, "wax": 18789, "wax</w>": 11910, "waxing</w>": 38781, "way": 3079, "way</w>": 923, "wayback": 47822, "wayne": 23632, "wayne</w>": 7003, "ways</w>": 1248, "waz": 20889, "waz</w>": 48835, "wb": 10726, "wb</w>": 12377, "wba</w>": 22675, "wbb</w>": 14482, "wbc</w>": 26745, "wbo</w>": 49053, "wbz</w>": 35471, "wc": 4842, "wc</w>": 5755, "wcc": 47166, "wcc</w>": 34926, "wcpo</w>": 46624, "wcs</w>": 39916, "wcvb</w>": 32709, "wcw</w>": 9041, "wd": 15998, "wd</w>": 7494, "wdw</w>": 40334, "we": 598, "we</w>": 649, "wea": 37146, "wea</w>": 47301, "weak": 12128, "weak</w>": 10128, "weaker</w>": 39735, "weakness</w>": 21448, "weaknesses</w>": 43487, "weal": 14759, "wealth": 33150, "wealth</w>": 7904, "wealthy</w>": 22617, "weap": 6156, "weapon": 42612, "weapon</w>": 10537, "weapons</w>": 10007, "wear": 12206, "wear</w>": 2839, "wearab": 22983, "wearable": 44943, "wearable</w>": 24973, "wearables</w>": 30319, "weare": 4264, "weare</w>": 27867, "weareall": 45980, "wearec": 43620, "wearen": 45635, "weareone": 16149, "weareoneexo</w>": 16448, "wearethe": 40242, "wearing</w>": 3309, "wears</w>": 11869, "weary</w>": 38766, "weasel</w>": 44308, "weather": 8808, "weather</w>": 2237, "weathercee</w>": 44980, "weatherchannel</w>": 42138, "weav": 22260, "weave</w>": 22450, "weaver</w>": 20297, "weaving</w>": 27131, "web": 2055, "web</w>": 4601, "webb</w>": 15708, "webber</w>": 34248, "webcam": 24211, "webcam</w>": 22589, "webcamtoy</w>": 27719, "webcast</w>": 28256, "webcomic</w>": 34286, "webcomics</w>": 39811, "webdesign</w>": 20470, "webdev</w>": 37000, "webdevelopment</w>": 47553, "weber</w>": 20179, "webin": 8460, "webinar</w>": 8921, "webinars</w>": 47755, "webpage</w>": 46964, "webs</w>": 32829, "webseries</w>": 44819, "website</w>": 3364, "websites</w>": 19278, "webster</w>": 19471, "websummit</w>": 48069, "wec</w>": 33152, "wechat</w>": 46124, "wed": 1687, "wed</w>": 3478, "wedd": 7576, "wedding": 11204, "wedding</w>": 3101, "weddings</w>": 15964, "wedge</w>": 21446, "wedges</w>": 33179, "wedne": 2380, "wednesday": 9311, "wednesday</w>": 2689, "wednesdaymotivation</w>": 37860, "wednesdays</w>": 24943, "wednesdaywisdom</w>": 11445, "wedo</w>": 43432, "weds</w>": 19107, "wee": 716, "wee</w>": 8288, "weed": 36935, "weed</w>": 8015, "weeds</w>": 26326, "week": 1286, "week</w>": 994, "weekday</w>": 29244, "weekdays</w>": 44330, "weekend": 17205, "weekend</w>": 1456, "weekender</w>": 36547, "weekends</w>": 14564, "weekly": 34652, "weekly</w>": 5885, "weeknd</w>": 29925, "weeks</w>": 2898, "weeksary</w>": 24628, "ween": 17517, "ween</w>": 1599, "weep</w>": 39270, "weeping</w>": 36629, "weer</w>": 32491, "weet</w>": 17742, "weets</w>": 13454, "wef</w>": 23313, "weg": 47867, "weg</w>": 47561, "wego": 44784, "wego</w>": 28220, "weh": 48458, 
"weh</w>": 40313, "weho</w>": 47798, "wei": 6958, "wei</w>": 20952, "weibo</w>": 20613, "weigh": 10565, "weigh</w>": 17346, "weighed</w>": 33210, "weighing</w>": 24455, "weighs</w>": 20481, "weight": 12723, "weight</w>": 3868, "weighted</w>": 43179, "weightlifting</w>": 36164, "weightloss</w>": 20359, "weights</w>": 21374, "weil</w>": 43720, "weiler</w>": 42203, "wein": 29134, "wein</w>": 37684, "weiner</w>": 38822, "weinstein</w>": 34367, "weir": 11299, "weir</w>": 25517, "weird": 27981, "weird</w>": 5613, "weirdest</w>": 29482, "weirdo</w>": 32476, "weis": 26251, "weiser</w>": 34833, "weiss</w>": 24794, "wel": 1267, "wel</w>": 8042, "welch</w>": 25820, "welcom": 11578, "welcome": 18318, "welcome</w>": 1881, "welcomed</w>": 12590, "welcomes</w>": 9304, "welcometo": 47511, "welcoming</w>": 8775, "weld</w>": 39776, "welding</w>": 24956, "welfare</w>": 12129, "well": 3277, "well</w>": 1123, "wellbeing</w>": 14273, "weller</w>": 40921, "welling</w>": 49165, "wellington</w>": 15389, "wellness": 40574, "wellness</w>": 9904, "wells": 42705, "wells</w>": 9804, "welove": 13573, "welp</w>": 28391, "wels</w>": 20852, "welsh": 19173, "welsh</w>": 10977, "welt</w>": 38595, "welter": 37115, "welterweight</w>": 39617, "wemb": 15213, "wembley</w>": 16579, "wen": 6590, "wen</w>": 11278, "wend": 15166, "wendell</w>": 42091, "wendy": 31616, "wendy</w>": 14074, "wenger</w>": 21105, "went": 18633, "went</w>": 2437, "wentworth</w>": 36423, "wentz</w>": 39179, "wer": 6316, "wer</w>": 2980, "were": 15461, "were</w>": 1365, "wered</w>": 6605, "weren</w>": 13611, "werewolf</w>": 32001, "werk</w>": 30176, "werner</w>": 29917, "wers</w>": 7110, "wes": 18620, "wes</w>": 14738, "wesle": 29606, "wesley</w>": 17332, "wesleyan</w>": 32509, "wesome</w>": 33292, "wess</w>": 44431, "west": 2973, "west</w>": 1593, "westbound</w>": 29208, "westbrook</w>": 26948, "westchester</w>": 36675, "westcoast</w>": 44610, "westend</w>": 44815, "wester": 9846, "western": 17079, "western</w>": 4463, "westfield</w>": 32309, "westh": 36798, "westin</w>": 43232, "westlake</w>": 41535, "westminster</w>": 15158, "weston</w>": 22771, "westside</w>": 33762, "westwood</w>": 26371, "westworld</w>": 42287, "wet": 12406, "wet</w>": 6682, "weta</w>": 40946, "wethenorth</w>": 45281, "wethepeople</w>": 48030, "wether": 33794, "wether</w>": 48405, "wetland</w>": 37357, "wetlands</w>": 26547, "wett</w>": 41971, "wetter</w>": 43957, "wewant": 39280, "wewill": 37241, "wex": 17234, "wexford</w>": 29876, "wexmondays</w>": 49042, "wey": 30376, "wey</w>": 19781, "weymouth</w>": 41433, "wf": 14576, "wf</w>": 22313, "wfa": 44606, "wfc</w>": 36431, "wfp</w>": 35193, "wftv</w>": 47075, "wg": 21091, "wg</w>": 25857, "wga</w>": 32354, "wgn": 48828, "wh": 573, "wh</w>": 13844, "wha": 18994, "wha</w>": 25884, "whal": 38967, "whale": 37083, "whale</w>": 11650, "whales</w>": 17722, "wham": 42506, "whar": 15517, "wharf</w>": 22452, "wharton</w>": 43320, "what": 4268, "what</w>": 768, "whatcha</w>": 37160, "whate": 6695, "whatever</w>": 6743, "whati": 23500, "whats": 9263, "whats</w>": 13084, "whatsapp</w>": 10119, "whatsoever</w>": 39928, "whatson": 35632, "whatyou": 30508, "whe": 2009, "whead</w>": 34583, "wheat": 20505, "wheat</w>": 10303, "wheaton</w>": 46933, "wheel": 7360, "wheel</w>": 6744, "wheelchair</w>": 17713, "wheeler</w>": 18405, "wheeling</w>": 34839, "wheels</w>": 8025, "whel": 9792, "whelan</w>": 40715, "when": 8753, "when</w>": 827, "whenever</w>": 10500, "where": 7052, "where</w>": 1234, "whereabouts</w>": 47808, "whereas</w>": 42234, "wheres": 46345, 
"wherever</w>": 14103, "whereyou": 46837, "whether</w>": 5903, "whew</w>": 39016, "whey</w>": 34556, "whi": 4295, "whi</w>": 33129, "which</w>": 1448, "whiche": 48719, "whichever</w>": 49138, "whil": 8499, "while</w>": 1519, "whilst</w>": 8596, "whim": 27766, "whimsical</w>": 42282, "whip</w>": 14412, "whipped</w>": 22323, "whipping</w>": 41567, "whir": 20873, "whirl": 30962, "whirlwind</w>": 47771, "whis": 6024, "whiskey": 41381, "whiskey</w>": 11610, "whisky": 37567, "whisky</w>": 12599, "whisp": 21986, "whispe": 30356, "whisper</w>": 27616, "whisperer</w>": 41368, "whispering</w>": 42599, "whispers</w>": 29133, "whist": 13640, "whistle": 23972, "whistle</w>": 19746, "whistleblower</w>": 40410, "whistler</w>": 29633, "whit": 4398, "whit</w>": 31498, "whitaker</w>": 35851, "whitby</w>": 30858, "white": 4699, "white</w>": 1579, "whiteboard</w>": 40839, "whitec": 24575, "whitehall</w>": 42827, "whitehead</w>": 43560, "whitehouse</w>": 20776, "whitening</w>": 35540, "whitepaper</w>": 42713, "whites": 35886, "whites</w>": 18835, "whitesox</w>": 28816, "whitewater</w>": 49350, "whitfield</w>": 48404, "whitley</w>": 40564, "whitman</w>": 32394, "whitney": 43021, "whitney</w>": 18048, "whitt": 33784, "whittaker</w>": 47595, "whl</w>": 25801, "who": 2969, "who</w>": 822, "whoa</w>": 16943, "whoever</w>": 11137, "whois": 41884, "whole": 10360, "whole</w>": 2954, "wholefoods</w>": 42840, "wholesale</w>": 18306, "wholesome</w>": 35959, "whom": 38158, "whom</w>": 12873, "whoo": 20003, "whoo</w>": 49290, "whoop</w>": 22060, "whoops</w>": 28433, "whopping</w>": 34384, "whore</w>": 31690, "whos": 41460, "whos</w>": 27130, "whose</w>": 6933, "whouse</w>": 45927, "whs</w>": 26292, "wht</w>": 32470, "whufc</w>": 31695, "whun": 18272, "why": 11040, "why</w>": 1182, "whyte</w>": 42386, "wi": 820, "wi</w>": 5585, "wib</w>": 45303, "wic": 7834, "wich": 9759, "wich</w>": 5238, "wichita</w>": 22566, "wick": 6798, "wick</w>": 6479, "wicked": 32579, "wicked</w>": 12825, "wicker</w>": 38096, "wicket</w>": 19180, "wickets</w>": 22110, "wicklow</w>": 39039, "wicz</w>": 30121, "wid": 11886, "wid</w>": 20886, "wide": 19341, "wide</w>": 3184, "widely</w>": 16195, "widening</w>": 46598, "wider</w>": 21263, "widesp": 20598, "widespread</w>": 21258, "widget</w>": 43906, "wido": 28068, "widow</w>": 19949, "widows</w>": 42129, "width</w>": 23571, "wie": 21378, "wie</w>": 9131, "wielding</w>": 47272, "wien</w>": 38131, "wiener</w>": 40567, "wies": 42788, "wif</w>": 37572, "wife</w>": 3607, "wifey</w>": 35282, "wifi</w>": 11026, "wig": 23690, "wig</w>": 12216, "wigan</w>": 23130, "wiggins</w>": 32329, "wiggle</w>": 47812, "wight": 41278, "wight</w>": 15545, "wigs</w>": 31207, "wii</w>": 8005, "wiiu</w>": 40980, "wiki": 10373, "wiki</w>": 24265, "wikileaks</w>": 28731, "wikipedia</w>": 15176, "wil": 1352, "wil</w>": 20581, "wilbur</w>": 43069, "wilcox</w>": 43231, "wild": 2780, "wild</w>": 3220, "wildatlantic": 35500, "wildatlanticway</w>": 35776, "wildcard</w>": 37360, "wildcat": 49077, "wildcat</w>": 25870, "wildcats</w>": 15909, "wilde</w>": 23498, "wilder": 14343, "wilder</w>": 23499, "wilderness</w>": 16506, "wildest</w>": 43028, "wildfire</w>": 22788, "wildfires</w>": 29184, "wildflower": 27628, "wildflower</w>": 33181, "wildflowerhour</w>": 31302, "wildflowers</w>": 29136, "wildlife": 13298, "wildlife</w>": 5250, "wildlifephotography</w>": 32307, "wildlifewednesday</w>": 48537, "wildly</w>": 35981, "wildoz</w>": 40113, "wiley</w>": 32747, "wilhelm</w>": 39696, "wilkes</w>": 39548, "wilkins</w>": 36986, "wilkinson</w>": 
26797, "will": 5062, "will</w>": 751, "willam": 43276, "willard</w>": 44920, "wille": 48739, "willem</w>": 38044, "willi": 2256, "william": 8420, "william</w>": 4705, "williams": 38452, "williams</w>": 4075, "williamsburg</w>": 30683, "williamson</w>": 20793, "willie</w>": 13907, "willing": 34160, "willing</w>": 11718, "willingness</w>": 40573, "willis</w>": 18491, "willow": 33887, "willow</w>": 15665, "wills</w>": 26913, "willy": 34502, "willy</w>": 19599, "wilmington</w>": 28052, "wilms": 47879, "wilshere</w>": 48359, "wilson": 23629, "wilson</w>": 5622, "wilt": 23394, "wilt</w>": 47357, "wilton</w>": 46638, "wiltshire</w>": 28025, "wim": 8662, "wim</w>": 27580, "wimble": 11752, "wimbledon</w>": 12229, "win": 831, "win</w>": 1225, "winchester</w>": 20647, "wind": 6812, "wind</w>": 3630, "winder": 44454, "winder</w>": 46245, "winding</w>": 22390, "windmill</w>": 34084, "windo": 3110, "window": 26675, "window</w>": 4879, "windows</w>": 5437, "winds": 12668, "winds</w>": 7012, "windshield</w>": 33002, "windsor": 44322, "windsor</w>": 12884, "windy</w>": 13446, "wine": 7375, "wine</w>": 2604, "winelover</w>": 26357, "winemaker</w>": 41588, "wineoclock</w>": 43846, "wineries</w>": 49349, "winery</w>": 15500, "wines</w>": 8263, "winetasting</w>": 41288, "winewednesday</w>": 35447, "wing": 8141, "wing</w>": 1340, "winged</w>": 24993, "winger</w>": 22727, "winget</w>": 44578, "wings</w>": 5178, "wink": 34455, "wink</w>": 25859, "winkle</w>": 36430, "winn</w>": 38104, "winne": 46273, "winner": 32961, "winner</w>": 2520, "winners</w>": 4320, "winni": 13018, "winnie</w>": 29022, "winning": 42099, "winning</w>": 2577, "winnings</w>": 46490, "winnipeg</w>": 14369, "winona</w>": 49202, "wins": 46839, "wins</w>": 2718, "winslow</w>": 39658, "winston</w>": 14848, "winter": 7340, "winter</w>": 2541, "winters</w>": 21587, "wintry</w>": 39504, "wip</w>": 10447, "wipe</w>": 26761, "wiped</w>": 31822, "wipes</w>": 33463, "wir": 16849, "wir</w>": 44838, "wire": 7558, "wire</w>": 7794, "wired</w>": 18935, "wireless</w>": 9103, "wires</w>": 24311, "wiring</w>": 36434, "wirral</w>": 34675, "wis": 3392, "wis</w>": 20405, "wiscon": 9857, "wisconsin</w>": 10265, "wisdom": 42474, "wisdom</w>": 5425, "wise": 19116, "wise</w>": 5558, "wisely</w>": 26173, "wiser</w>": 44859, "wish": 11328, "wish</w>": 2412, "wished</w>": 25883, "wishes</w>": 6045, "wishing</w>": 5307, "wishlist</w>": 31969, "wit": 584, "wit</w>": 8531, "witch": 20139, "witch</w>": 10083, "witchcraft</w>": 35065, "witcher</w>": 33684, "witches</w>": 21673, "with": 1435, "with</w>": 593, "withdra": 24696, "withdraw</w>": 31670, "withdrawal</w>": 25765, "withdrawn</w>": 46687, "withdraws</w>": 48637, "wither": 39655, "witherspoon</w>": 45409, "within</w>": 4154, "withme</w>": 44670, "without": 32836, "without</w>": 2193, "withstand</w>": 42236, "withthe": 36872, "withus</w>": 30572, "withyou</w>": 30351, "witne": 12096, "witness</w>": 8793, "witnessed</w>": 20187, "witnesses</w>": 22778, "witnessing</w>": 33618, "wits</w>": 30938, "witt": 38194, "witt</w>": 17168, "witter</w>": 31597, "witty</w>": 29970, "witz": 44186, "witz</w>": 13265, "wiv</w>": 48925, "wives</w>": 14378, "wiwx</w>": 44461, "wiz": 7730, "wiz</w>": 23178, "wizar": 49121, "wizard": 30490, "wizard</w>": 14295, "wizards</w>": 19140, "wizkid</w>": 40146, "wj": 19739, "wj</w>": 35453, "wk": 11512, "wk</w>": 11528, "wkend</w>": 42336, "wknd</w>": 20851, "wks</w>": 25508, "wku</w>": 43377, "wl": 13299, "wl</w>": 9613, "wm": 20268, "wm</w>": 15790, "wn": 1186, "wn</w>": 757, "wnba</w>": 32358, 
"wned</w>": 8628, "wns</w>": 12950, "wnt</w>": 22484, "wny</w>": 24833, "wo": 1613, "wo</w>": 11132, "woah</w>": 17751, "wob": 35984, "woc</w>": 39011, "wod</w>": 41522, "woes</w>": 27860, "wof": 45671, "woj": 48931, "wok</w>": 28912, "woke</w>": 9331, "woken</w>": 43697, "woking</w>": 43931, "wol": 2798, "wol</w>": 48622, "wold</w>": 42399, "wolf": 9453, "wolf</w>": 5916, "wolfe</w>": 24989, "wolff</w>": 34369, "wolfgang</w>": 34061, "wolfpack</w>": 30887, "wolve": 45101, "wolver": 14334, "wolverhampton</w>": 34518, "wolverine</w>": 23353, "wolverines</w>": 42003, "wolves</w>": 9372, "wom": 1087, "womack</w>": 48980, "woman": 15716, "woman</w>": 2308, "womanc": 35630, "womancrush": 37721, "womancrushwednesday</w>": 39714, "womanin": 30562, "womaninbiz</w>": 36482, "womb</w>": 37023, "women": 3648, "women</w>": 1507, "womenin": 13062, "womeninscience</w>": 41343, "womeninstem</w>": 29380, "womenintech</w>": 31470, "womenof": 48421, "womens": 12822, "womens</w>": 14408, "womensart</w>": 38548, "womensday</w>": 13956, "womenshi": 22887, "womenshistorymonth</w>": 24982, "womensmarch</w>": 30102, "won": 1528, "won</w>": 1749, "wonder": 2070, "wonder</w>": 3936, "wondercon</w>": 46944, "wondered</w>": 15550, "wonderful</w>": 2582, "wonderfully</w>": 23245, "wondering</w>": 8360, "wonderland</w>": 13874, "wonders</w>": 14048, "wonderwoman</w>": 31000, "wondo</w>": 38402, "wondr": 46771, "wong</w>": 17876, "wonka</w>": 43463, "wont": 43174, "wont</w>": 15952, "woo": 1867, "woo</w>": 9322, "wood": 3269, "wood</w>": 1704, "woodbridge</w>": 49074, "wooden": 48226, "wooden</w>": 9057, "woodland": 44314, "woodland</w>": 17447, "woodlands</w>": 32430, "woodley</w>": 40566, "woodpecker</w>": 32684, "woods</w>": 6267, "woodson</w>": 48967, "woodstock</w>": 29486, "woodward</w>": 27419, "woodwork</w>": 47386, "woodworking</w>": 29267, "woody": 38627, "woody</w>": 17144, "woof": 34234, "woof</w>": 24028, "woohoo</w>": 20172, "wook</w>": 29192, "wool": 9967, "wool</w>": 13283, "woolf</w>": 43728, "woolly</w>": 47722, "woon</w>": 33126, "wooo</w>": 43217, "woop</w>": 31884, "woot</w>": 22466, "wor": 641, "worcester": 22172, "worcester</w>": 19580, "worcestershire</w>": 38440, "worcestershirehour</w>": 43644, "word": 8272, "word</w>": 2653, "wordof": 33500, "wordoftheday</w>": 43594, "wordpress</w>": 15193, "words": 31007, "words</w>": 2709, "wore</w>": 8953, "work": 1636, "work</w>": 951, "workday</w>": 29735, "worked</w>": 5410, "worker</w>": 8098, "workers</w>": 4795, "workflow</w>": 28502, "workforce</w>": 14672, "workin": 31825, "workin</w>": 26323, "working": 20806, "working</w>": 1699, "workinprogress</w>": 46086, "workout</w>": 6773, "workouts</w>": 22779, "workplace</w>": 11959, "workplaces</w>": 47383, "works</w>": 2322, "workshop</w>": 3832, "workshops</w>": 12262, "workspace</w>": 34470, "worl": 5221, "world": 2334, "world</w>": 1002, "worlda": 46627, "worldbank</w>": 36759, "worldbookday</w>": 31191, "worldcup": 42525, "worldcup</w>": 8650, "worlden": 44668, "worldenviron": 47115, "worldenvironmentday</w>": 47522, "worldly</w>": 36268, "worldo": 41698, "worldof": 22636, "worldre": 33951, "worlds</w>": 7691, "worldseries</w>": 26695, "worldtour</w>": 23202, "worldwater": 41176, "worldwaterday</w>": 44520, "worldwide</w>": 6214, "worm": 33709, "worm</w>": 10945, "worms</w>": 20231, "worn</w>": 9037, "worried</w>": 11911, "worries</w>": 17684, "worry</w>": 7534, "worrying</w>": 24058, "worse</w>": 8236, "worsen": 46344, "worshi": 31840, "worship": 46399, "worship</w>": 9023, "worst</w>": 5719, 
"wort</w>": 30209, "worth": 10671, "worth</w>": 2450, "worthing</w>": 39929, "worthit</w>": 40830, "worthless</w>": 44736, "worths</w>": 44633, "worthwhile</w>": 36295, "worthy</w>": 8881, "worx</w>": 44973, "wot</w>": 24863, "wou": 5279, "would": 39873, "would</w>": 1311, "wouldn</w>": 5878, "wouldnt</w>": 41595, "wound</w>": 19231, "wounded</w>": 14859, "wounds</w>": 21290, "woven</w>": 19830, "wow": 22191, "wow</w>": 2781, "woz": 44558, "wozni": 47782, "wp": 15378, "wp</w>": 13302, "wpg</w>": 35048, "wps</w>": 33386, "wq": 45195, "wr": 1189, "wr</w>": 8028, "wra": 3852, "wra</w>": 46004, "wral</w>": 49050, "wrangler</w>": 30923, "wrap</w>": 7094, "wrapped</w>": 9875, "wrapping</w>": 15223, "wraps</w>": 18236, "wrath</w>": 29783, "wray</w>": 48943, "wrc</w>": 16004, "wre": 3168, "wreath</w>": 23091, "wrec": 20879, "wreck": 28775, "wreck</w>": 15017, "wrecked</w>": 32695, "wreckem</w>": 45676, "wrecking</w>": 36956, "wrecks</w>": 45545, "wren": 20191, "wren</w>": 31970, "wrench</w>": 30980, "wrest": 4177, "wrestle": 17097, "wrestle</w>": 28086, "wrestlemania</w>": 18849, "wrestler</w>": 19790, "wrestlers</w>": 25902, "wrestling": 31292, "wrestling</w>": 5904, "wrexham</w>": 34479, "wri": 7667, "wri</w>": 42007, "wright": 28616, "wright</w>": 6991, "wrights</w>": 43711, "wrigley</w>": 33538, "wrink": 22201, "wrinkle</w>": 46642, "wrinkles</w>": 35525, "wrist": 19243, "wrist</w>": 16139, "wristband</w>": 36890, "wristbands</w>": 44864, "writ": 2902, "write": 28874, "write</w>": 4946, "writer": 27886, "writer</w>": 4422, "writers": 18742, "writers</w>": 7307, "writerslife</w>": 25007, "writes</w>": 8023, "writing": 16053, "writing</w>": 2979, "writingcommunity</w>": 39178, "writings</w>": 36259, "written</w>": 5231, "wro": 5447, "wrong": 18381, "wrong</w>": 3669, "wrongly</w>": 45642, "wrote</w>": 5796, "wrought</w>": 48125, "wrs</w>": 45280, "ws": 6300, "ws</w>": 799, "wsb": 30681, "wsbtv</w>": 38394, "wsj</w>": 19764, "wski</w>": 12548, "wsl</w>": 43706, "wsoc</w>": 40253, "wson</w>": 33954, "wsop</w>": 41231, "wsu": 44674, "wsu</w>": 32913, "wsw</w>": 43285, "wt": 15873, "wt</w>": 12255, "wta</w>": 25984, "wtc</w>": 39718, "wtf</w>": 6891, "wth</w>": 23021, "wthr": 45269, "wti</w>": 47345, "wto</w>": 36406, "wts</w>": 32159, "wu": 9710, "wu</w>": 9837, "wud</w>": 43870, "wul": 35154, "wunder": 36661, "wur": 24040, "wurst</w>": 44409, "wusa</w>": 40021, "wut</w>": 28590, "wv": 18920, "wv</w>": 14743, "wvu": 44878, "wvu</w>": 25879, "ww": 3181, "ww</w>": 4491, "wwc</w>": 26505, "wwdc</w>": 47441, "wwe": 12112, "wwe</w>": 5290, "wwen": 23308, "wwenetwork</w>": 37228, "wwenxt</w>": 39898, "wwer": 32038, "wwf</w>": 23332, "wwfc</w>": 42681, "wwg</w>": 35322, "wwi</w>": 20194, "wwii</w>": 10261, "www": 26074, "www</w>": 9667, "wwwbigbaldhead</w>": 30761, "wwww": 34224, "wwww</w>": 25200, "wwwww</w>": 48268, "wwx</w>": 47431, "wx": 18192, "wx</w>": 3561, "wy": 4665, "wy</w>": 7625, "wyatt</w>": 21660, "wyd</w>": 33113, "wye": 48436, "wye</w>": 43751, "wylie</w>": 49330, "wyn": 11802, "wyn</w>": 17504, "wynn</w>": 36117, "wynne</w>": 35951, "wynonna": 41456, "wynonnaearp</w>": 43755, "wyoming</w>": 18693, "x": 87, "x</w>": 343, "xa</w>": 24831, "xan": 45530, "xander</w>": 45601, "xavi": 36342, "xavier": 41044, "xavier</w>": 18567, "xb</w>": 33678, "xbox": 18063, "xbox</w>": 7748, "xboxone</w>": 27410, "xc</w>": 12515, "xchange</w>": 49132, "xd</w>": 6380, "xe": 42886, "xe</w>": 19183, "xen": 15568, "xer": 49005, "xf</w>": 35274, "xfactor</w>": 25211, "xfinity</w>": 35107, "xford</w>": 34732, 
"xh</w>": 45771, "xham</w>": 25284, "xi": 2467, "xi</w>": 7376, "xia": 19854, "xia</w>": 20724, "xian</w>": 42570, "xiao</w>": 49318, "xiaomi</w>": 27477, "xico</w>": 38469, "xide</w>": 17398, "xie": 40122, "xie</w>": 15976, "xii</w>": 36525, "xiii</w>": 28199, "xim": 11217, "xin": 27053, "xin</w>": 41517, "xing</w>": 14383, "xion</w>": 24164, "xis</w>": 35793, "xit</w>": 5316, "xiumin</w>": 36563, "xiv</w>": 16125, "xj</w>": 42453, "xl": 36529, "xl</w>": 8833, "xley</w>": 38223, "xm</w>": 18626, "xma": 48805, "xmas": 48848, "xmas</w>": 6425, "xmen</w>": 28708, "xn</w>": 25388, "xo": 26936, "xo</w>": 9000, "xon": 29186, "xon</w>": 8482, "xox": 11531, "xox</w>": 34050, "xoxo</w>": 13313, "xp</w>": 15651, "xper": 32200, "xperia</w>": 37615, "xpo</w>": 44377, "xpress</w>": 31809, "xq</w>": 40606, "xr</w>": 26276, "xrp</w>": 26965, "xs</w>": 16397, "xt</w>": 1052, "xtina</w>": 45520, "xton": 32666, "xton</w>": 10597, "xtra</w>": 26969, "xtre": 27025, "xtreme</w>": 33483, "xu": 42063, "xu</w>": 37198, "xv</w>": 17768, "xvi</w>": 44031, "xx": 5675, "xx</w>": 3553, "xxl</w>": 29777, "xxx": 33923, "xxx</w>": 8352, "xxxx": 32035, "xxxx</w>": 22819, "xxxxx</w>": 44195, "xy": 20023, "xy</w>": 11443, "y": 88, "y</w>": 344, "ya": 5018, "ya</w>": 1430, "yaa": 48847, "yaa</w>": 34498, "yaan</w>": 34680, "yab": 27737, "yach": 9039, "yacht": 43806, "yacht</w>": 12859, "yachts</w>": 29260, "yad": 13276, "yad</w>": 40047, "yadav</w>": 26650, "yaf": 38019, "yag": 35081, "yah": 16170, "yah</w>": 12381, "yaho": 37929, "yahoo": 38152, "yahoo</w>": 16846, "yak": 11014, "yak</w>": 29074, "yaki</w>": 44677, "yaku": 29572, "yakuza</w>": 42628, "yal": 16198, "yal</w>": 13418, "yale": 39926, "yale</w>": 17157, "yall</w>": 9210, "yam": 6666, "yam</w>": 19318, "yama</w>": 23512, "yamaha</w>": 18854, "yan": 3949, "yan</w>": 4788, "yana</w>": 18698, "yand": 38609, "yang": 23818, "yang</w>": 12605, "yani</w>": 26439, "yankee</w>": 21554, "yankees</w>": 11889, "yann": 40246, "yann</w>": 38657, "yao</w>": 45231, "yap": 48700, "yap</w>": 34468, "yar": 6786, "yar</w>": 23071, "yard": 20234, "yard</w>": 4313, "yards</w>": 7550, "yarmouth</w>": 45941, "yarn</w>": 19702, "yarra</w>": 46824, "yas": 8168, "yas</w>": 20570, "yash": 30216, "yash</w>": 37836, "yasi": 37700, "yasss</w>": 23873, "yat": 29443, "yat</w>": 34965, "yates</w>": 27677, "yatra</w>": 38932, "yav": 41275, "yaw": 31989, "yawn</w>": 48643, "yay": 20614, "yay</w>": 6712, "yaya</w>": 37608, "yaz": 19348, "yaz</w>": 42252, "yb": 41785, "yb</w>": 27615, "yc</w>": 11931, "ycle</w>": 38089, "yd": 29896, "yd</w>": 9534, "yday</w>": 15899, "yds</w>": 24819, "ye": 693, "ye</w>": 4582, "yea</w>": 13687, "yeah": 29405, "yeah</w>": 3908, "year": 5163, "year</w>": 935, "yearbook</w>": 21636, "yearling</w>": 48392, "yearly</w>": 24541, "yearof": 31944, "yearofthe": 47899, "years": 30864, "years</w>": 1151, "yearsof": 14932, "yearswith": 45249, "yeast</w>": 25819, "yeats</w>": 44903, "yed": 28137, "yed</w>": 3301, "yee": 18114, "yee</w>": 23108, "yeezy</w>": 24901, "yeg": 16854, "yeg</w>": 11976, "yegfood</w>": 48711, "yeh</w>": 21331, "yel": 3323, "yel</w>": 48164, "yell</w>": 30824, "yelled</w>": 39199, "yelling</w>": 26581, "yellow": 12059, "yellow</w>": 4481, "yellowstone</w>": 29241, "yelp</w>": 31674, "yemen": 29276, "yemen</w>": 12513, "yemeni</w>": 44656, "yemi</w>": 42267, "yen": 29602, "yen</w>": 17960, "yeo": 32292, "yeo</w>": 43830, "yeol</w>": 15808, "yeon</w>": 16602, "yep</w>": 10964, "yer": 15491, "yer</w>": 2371, "yers</w>": 3722, "yes": 21620, "yes</w>": 1958, 
"yess": 42778, "yess</w>": 40189, "yesss</w>": 36210, "yessss</w>": 45620, "yester": 1905, "yesterday</w>": 1926, "yesterdays</w>": 36238, "yesung</w>": 38527, "yet</w>": 2296, "yeti</w>": 34228, "yev</w>": 39855, "yew</w>": 34660, "yey</w>": 45447, "yg</w>": 16396, "ygk</w>": 44758, "ygo": 46166, "yh</w>": 41978, "yi": 5826, "yi</w>": 14762, "yield</w>": 16825, "yields</w>": 24856, "yikes</w>": 25094, "yin": 26476, "yin</w>": 23543, "ying": 42933, "ying</w>": 910, "yixing</w>": 32120, "yk</w>": 30965, "yl": 2656, "yl</w>": 4045, "ylan</w>": 41875, "ylde</w>": 42850, "yle": 32305, "yle</w>": 10770, "ylene</w>": 34239, "yler</w>": 48081, "yles</w>": 42860, "ylon</w>": 22375, "ylor</w>": 48468, "ym": 1786, "ym</w>": 19587, "yman</w>": 29077, "ymc": 47101, "ymca</w>": 22369, "yment</w>": 8199, "ymes</w>": 39968, "ymi</w>": 5271, "ymm</w>": 37133, "ymoun": 41426, "ymouth</w>": 36429, "yn": 2823, "yn</w>": 4100, "yne</w>": 18238, "ynes</w>": 18020, "ynn</w>": 10499, "ynna</w>": 48292, "ynwa</w>": 27372, "yo": 586, "yo</w>": 3497, "yoda</w>": 31922, "yof": 5966, "yofficial</w>": 21818, "yofthe": 43983, "yog": 34985, "yog</w>": 36539, "yoga": 25872, "yoga</w>": 5523, "yogh": 32626, "yoghurt</w>": 33491, "yogi</w>": 22766, "yogur": 16137, "yogurt</w>": 16819, "yoh</w>": 48880, "yoke</w>": 41969, "yoko": 25929, "yoko</w>": 32256, "yokohama</w>": 42409, "yol": 19387, "yol</w>": 35218, "yolanda</w>": 43845, "yolo</w>": 20905, "yom": 34718, "yom</w>": 44527, "yon": 10147, "yon</w>": 7604, "yong": 27960, "yong</w>": 20887, "yonge</w>": 48592, "yoo": 25842, "yoo</w>": 20775, "yoon": 30863, "yoon</w>": 22113, "yoona</w>": 32736, "yoongi</w>": 24037, "yor": 2028, "yor</w>": 21132, "york": 5318, "york</w>": 2705, "yorker</w>": 23865, "yorkers</w>": 41041, "yorks</w>": 39093, "yorkshi": 43367, "yorkshire": 27007, "yorkshire</w>": 8633, "yoruba</w>": 46083, "yos</w>": 35607, "yosemite</w>": 25893, "yoshi": 22920, "yoshi</w>": 25354, "yot": 22875, "yotes</w>": 46157, "yotpo</w>": 26113, "you": 1562, "you</w>": 592, "youare": 33879, "youcan": 32498, "youknow": 47919, "youknow</w>": 41088, "youn": 1596, "young": 6939, "young</w>": 1888, "younger</w>": 10414, "youngest</w>": 12316, "youngjae</w>": 46426, "youngster</w>": 35881, "youngsters</w>": 28098, "younow</w>": 33831, "your": 2130, "your</w>": 695, "youre": 28344, "youre</w>": 19695, "yourown": 28583, "yours</w>": 3834, "yourself</w>": 3053, "yourselves</w>": 19747, "youth": 10743, "youth</w>": 3281, "youthful</w>": 37480, "youths</w>": 23614, "youts</w>": 22737, "youtu": 13868, "youtube": 31258, "youtube</w>": 3895, "youtuber</w>": 24720, "youtubers</w>": 36822, "youu</w>": 35055, "youuu</w>": 35324, "youuuu</w>": 47123, "yoy</w>": 41865, "yp": 38370, "yp</w>": 34734, "ypg</w>": 37386, "yql</w>": 46122, "yqr</w>": 36881, "yr": 18395, "yr</w>": 4333, "yrs</w>": 4822, "ys": 1971, "ys</w>": 961, "yser</w>": 33121, "ysis</w>": 4843, "ysl</w>": 45681, "ysm</w>": 23842, "yst": 40528, "yt": 36777, "yt</w>": 14779, "ytd</w>": 47524, "yte</w>": 48172, "yu": 3371, "yu</w>": 8887, "yuan</w>": 26236, "yuck</w>": 48282, "yugo": 48231, "yuh</w>": 42547, "yui</w>": 47932, "yuk": 17037, "yuk</w>": 24063, "yuki</w>": 34010, "yukon</w>": 27094, "yul": 39832, "yum": 6869, "yum</w>": 7259, "yuma</w>": 47566, "yummy</w>": 7687, "yun": 14976, "yun</w>": 18288, "yung": 44545, "yung</w>": 17676, "yunho</w>": 39748, "yup</w>": 13231, "yur": 42533, "yuri</w>": 23823, "yusuf</w>": 33222, "yuv": 36784, "yves</w>": 33698, "yvon": 23327, "yvonne</w>": 32583, "yvr</w>": 29058, "yw": 
33741, "yx</w>": 35624, "yxe</w>": 34240, "yy": 3433, "yy</w>": 8321, "yya</w>": 37444, "yyc": 27542, "yyc</w>": 11741, "yyj</w>": 26203, "yyy</w>": 11514, "yyyy": 38749, "yyyy</w>": 16955, "yyyyy</w>": 26089, "yyyyyy</w>": 47055, "yz": 37579, "yz</w>": 46451, "yü": 48232, "z": 89, "z</w>": 345, "za": 3710, "za</w>": 2186, "zab": 22982, "zable</w>": 37002, "zac": 25501, "zac</w>": 19159, "zach": 13401, "zach</w>": 11815, "zachary</w>": 32401, "zack": 30567, "zack</w>": 19120, "zad": 47314, "zad</w>": 27838, "zada</w>": 34889, "zaf": 21837, "zafar</w>": 46668, "zag": 26091, "zag</w>": 29346, "zagre": 34107, "zagreb</w>": 35355, "zah": 23258, "zah</w>": 43297, "zaha</w>": 44408, "zai": 44329, "zai</w>": 27065, "zain": 34400, "zain</w>": 45366, "zak": 13050, "zak</w>": 20738, "zaki</w>": 48091, "zal": 20552, "zal</w>": 33298, "zam": 7218, "zam</w>": 41578, "zambia</w>": 21671, "zan": 7284, "zan</w>": 17835, "zana</w>": 39643, "zand": 37712, "zane</w>": 34786, "zani</w>": 45373, "zania</w>": 15059, "zano</w>": 27637, "zanzi": 47835, "zap": 24134, "zapp": 33504, "zappa</w>": 46592, "zar": 5458, "zar</w>": 16392, "zara</w>": 24454, "zardari</w>": 20174, "zas</w>": 48261, "zation</w>": 3683, "zawa</w>": 49281, "zay": 7102, "zayed</w>": 36726, "zayn": 22292, "zayn</w>": 10308, "zaynmalik</w>": 25278, "zazzle</w>": 47857, "ze": 2254, "ze</w>": 1298, "zeal": 44951, "zealand</w>": 7618, "zeb</w>": 46518, "zebra": 47394, "zebra</w>": 22548, "zed": 21047, "zed</w>": 1993, "zedd</w>": 45608, "zee": 25468, "zee</w>": 14080, "zeiss</w>": 47460, "zeit": 37898, "zeit</w>": 37906, "zek</w>": 40829, "zeke</w>": 47065, "zel": 10389, "zel</w>": 12027, "zelda</w>": 17138, "zell</w>": 39526, "zen": 8518, "zen</w>": 3928, "zend": 33478, "zendaya</w>": 35956, "zenith</w>": 44740, "zens</w>": 15298, "zeph": 40726, "zepp": 22977, "zeppelin</w>": 25408, "zer": 6118, "zer</w>": 3716, "zero": 14867, "zero</w>": 5848, "zers</w>": 9547, "zes</w>": 4073, "zest</w>": 37709, "zet": 34098, "zeta</w>": 30954, "zetta</w>": 45993, "zeus</w>": 32800, "zey": 46647, "zh": 33389, "zh</w>": 41621, "zhang</w>": 21127, "zhen": 37374, "zhen</w>": 33236, "zhou</w>": 17384, "zhu": 42049, "zi": 2651, "zi</w>": 5819, "zia</w>": 13764, "zid": 30235, "zidane</w>": 34643, "zie": 29316, "zie</w>": 8956, "zieg": 40157, "ziegler</w>": 46812, "ziel</w>": 32151, "zier</w>": 15399, "zies</w>": 38001, "ziest</w>": 28159, "zig": 15950, "zig</w>": 21345, "ziggy</w>": 39274, "zik</w>": 30125, "zika</w>": 28783, "zil": 25039, "zil</w>": 33190, "zilla</w>": 17879, "zim": 8112, "zim</w>": 22577, "zimbab": 12373, "zimbabwe": 45668, "zimbabwe</w>": 13583, "zimmer": 27452, "zimmer</w>": 35211, "zimmerman</w>": 38231, "zin": 14085, "zin</w>": 21278, "zinc</w>": 27458, "zind": 26206, "zindabad</w>": 42208, "zine</w>": 16100, "zing": 25062, "zing</w>": 3152, "zinger</w>": 42027, "zio</w>": 13906, "zion": 31763, "zion</w>": 20963, "zione</w>": 36161, "zionist</w>": 33078, "zip": 26479, "zip</w>": 16083, "zipper</w>": 33670, "zir": 31892, "zl</w>": 39168, "zlat": 32489, "zlatan</w>": 37877, "zm</w>": 43691, "zman</w>": 24248, "zn</w>": 18004, "zo": 4397, "zo</w>": 5056, "zodi": 22660, "zodiac</w>": 27753, "zoe": 43114, "zoe</w>": 16662, "zoey</w>": 39871, "zog</w>": 40680, "zol": 25939, "zola</w>": 46105, "zom": 6623, "zombi": 29452, "zombie</w>": 11819, "zombies": 46702, "zombies</w>": 16517, "zon": 15109, "zon</w>": 14618, "zona</w>": 42134, "zone": 37197, "zone</w>": 4442, "zones</w>": 17247, "zoning</w>": 36790, "zoo": 8182, "zoo</w>": 7147, "zoom": 32671, 
"zoom</w>": 13909, "zor": 17605, "zou": 38072, "zr</w>": 39275, "zs</w>": 35248, "zshq</w>": 41442, "zt</w>": 42629, "zu": 4091, "zu</w>": 14184, "zucchini</w>": 29873, "zucker": 26890, "zuckerberg</w>": 30066, "zul": 31146, "zulu</w>": 32821, "zum</w>": 35094, "zuma</w>": 23326, "zumba</w>": 32976, "zun": 42440, "zur": 17128, "zurich</w>": 21288, "zw</w>": 42188, "zx</w>": 31604, "zy": 6615, "zy</w>": 2303, "zyk</w>": 39112, "zyme</w>": 36472, "zyn": 45287, "zz": 1544, "zz</w>": 4943, "zza</w>": 14642, "zzi</w>": 13974, "zzie</w>": 18635, "zzle</w>": 7873, "zzled</w>": 39075, "zzo</w>": 14036, "zzy": 21275, "zzy</w>": 8353, "zzz</w>": 20055, "zzzz": 35742, "zzzz</w>": 43103, "{": 90, "{</w>": 346, "{}</w>": 39025, "|": 91, "| "|</w>": 347, "|@</w>": 41677, "||</w>": 7566, "}": 92, "}</w>": 348, "~": 93, "~!</w>": 31181, "~\"</w>": 48442, "~</w>": 349, "~></w>": 43291, "~@</w>": 44247, "~~": 11461, "~~</w>": 16671, "~~~</w>": 32472, "~~~~": 28295, "¡": 94, "¡</w>": 350, "¡ï¸ı": 15113, "¡ï¸ı</w>": 4174, "¡ľ": 43991, "¢": 95, "¢</w>": 351, "£": 96, "£</w>": 352, "£ï¸ı</w>": 18446, "¤": 97, "¤</w>": 353, "¥": 98, "¥</w>": 354, "¦": 99, "¦</w>": 355, "¦Ī</w>": 47615, "§": 100, "§</w>": 356, "¨": 101, "¨</w>": 357, "©": 102, "©</w>": 358, "ª": 103, "ª</w>": 359, "«": 104, "«</w>": 360, "¬": 105, "¬</w>": 361, "¬ë": 31736, "®": 106, "®</w>": 362, "¯": 107, "¯</w>": 363, "°": 108, "°:</w>": 21787, "°</w>": 364, "°ï¸ı</w>": 34777, "±": 109, "±</w>": 365, "±ï¸ı</w>": 41020, "²": 110, "²</w>": 366, "³": 111, "³</w>": 367, "³ï¸ı": 22195, "³ï¸ı</w>": 24706, "´": 112, "´</w>": 368, "µ": 113, "µ</w>": 369, "µï¸ı</w>": 27605, "¶": 114, "¶</w>": 370, "·": 115, "·</w>": 371, "¸": 116, "¸</w>": 372, "¸ë": 19693, "¹": 117, "¹</w>": 373, "º": 118, "º</w>": 374, "»": 119, "»</w>": 375, "¼": 120, "¼</w>": 376, "½": 121, "½</w>": 377, "½ï¸ı</w>": 31333, "¾": 122, "¾</w>": 378, "¿": 123, "¿</w>": 379, "À": 124, "À</w>": 380, "Á": 125, "Á</w>": 381, "Â": 126, "Â</w>": 382, "¡": 26868, "¡</w>": 10830, "¡¡</w>": 45505, "¢</w>": 41359, "£": 31117, "£</w>": 1950, "Â¥</w>": 20199, "¨": 19957, "¨¨": 23089, "¨¨¨¨": 41223, "©": 31148, "©</w>": 5811, "«</w>": 14434, "®": 30857, "®</w>": 8436, "¯": 38682, "¯</w>": 43593, "¯\\": 44096, "¯\\_(</w>": 45115, "°": 21305, "°</w>": 6858, "²</w>": 41175, "´": 30560, "´</w>": 12559, "·</w>": 14844, "º</w>": 28059, "»": 31642, "»</w>": 7599, "½</w>": 33613, "¿": 44559, "¿</w>": 17133, "ÂŃ</w>": 22618, "Ã": 127, "Ã</w>": 383, "á": 7261, "á</w>": 22229, "án": 38340, "án</w>": 21385, "â": 26170, "ã": 19339, "ão</w>": 21141, "ä": 10896, "ä</w>": 47276, "än": 42787, "Ã¥": 23176, "æ": 42495, "ç": 10067, "ça</w>": 22711, "è": 12138, "è</w>": 37761, "ère</w>": 30272, "ès</w>": 41210, "é": 3459, "é</w>": 4166, "éal</w>": 45251, "ée</w>": 13489, "és</w>": 20507, "ê": 27515, "ë": 29526, "ë</w>": 40520, "î": 48704, "ï": 35689, "ñ": 6445, "ña</w>": 17753, "ño</w>": 16574, "ños</w>": 40104, "ó": 8891, "ó</w>": 27733, "ón</w>": 13926, "ô": 26815, "ö": 7255, "ö</w>": 37423, "ör": 31762, "ø": 17483, "ø</w>": 45598, "ú": 17963, "ú</w>": 36019, "ü": 6522, "ü</w>": 47177, "ür": 26132, "ÃĹ</w>": 16165, "Ãł": 36149, "Ãł</w>": 21259, "ÃŃ": 8366, "ÃŃ</w>": 23928, "ÃŃa</w>": 16609, "ÃŃn</w>": 33623, "Ä": 128, "Ä</w>": 384, "ı": 18562, "ı</w>": 41901, "Äģ": 23134, "Äĩ</w>": 31719, "Äį": 45414, "ÄŁ": 26540, "Å": 129, "Å</w>": 385, "Å¡": 35621, "ÅĤ": 40419, "Åį": 41267, "ÅŁ": 21254, "ÅŁ</w>": 40706, "Æ": 130, "Æ</w>": 386, "Ç": 131, "Ç</w>": 387, "È": 132, "È</w>": 388, "É": 133, "É</w>": 389, "Ê": 134, 
"Ê</w>": 390, "Ë": 135, "Ë</w>": 391, "Ì": 136, "Ì</w>": 392, "Ìĩ</w>": 16384, "Í": 137, "Í</w>": 393, "Î": 138, "Î</w>": 394, "Ï": 139, "Ï</w>": 395, "Ïī</w>": 38065, "Ð": 140, "Ð</w>": 396, "а": 16912, "а</w>": 27080, "аÐ": 31090, "в": 39813, "е": 22176, "и": 16701, "иÐ": 29503, "к": 27152, "л": 47611, "м": 38018, "н": 22705, "о": 13506, "о</w>": 29386, "оÐ": 20978, "од": 38416, "оÑĤ": 28599, "п": 26302, "пÑĢи": 46321, "пÑĢиÑĢода</w>": 48150, "Ñ": 141, "Ñ</w>": 397, "ÑĢ": 16370, "ÑĢи": 41092, "ÑĢод": 47039, "ÑĢода</w>": 47929, "Ñģ": 23669, "ÑĤ": 17875, "Ñĥ": 39729, "ÑĦ": 27993, "ÑĦоÑĤ": 35155, "ÑĦоÑĤо</w>": 38981, "Ñĭ": 45001, "Ò": 142, "Ò</w>": 398, "Ó": 143, "Ó</w>": 399, "Ô": 144, "Ô</w>": 400, "Õ": 145, "Õ</w>": 401, "Ö": 146, "Ö</w>": 402, "×": 147, "×</w>": 403, "Ø": 148, "Ø</w>": 404, "ا": 6042, "ا</w>": 22625, "اØ": 13189, "ار": 40137, "اÙ": 8453, "اÙĦ": 12973, "اÙħ": 47626, "اÙĨ": 42773, "اÙĨ</w>": 33200, "ب": 16378, "ب</w>": 35330, "Ø©</w>": 20915, "ت": 18197, "ت</w>": 44333, "ج": 26375, "Ø®": 41495, "د": 19872, "د</w>": 35566, "ر": 10948, "ر</w>": 24933, "رÙĬ": 43273, "ز": 36169, "س": 17856, "Ø´": 28770, "ص": 27271, "Ø·": 32050, "ع": 18843, "غ": 48510, "ØŃ": 25722, "Ù": 149, "Ù</w>": 405, "Ùģ": 24112, "ÙĤ": 27585, "Ùĥ": 33499, "ÙĦ": 14251, "ÙĦ</w>": 37899, "Ùħ": 12986, "Ùħ</w>": 29945, "ÙĨ": 16655, "ÙĨ</w>": 25386, "Ùĩ": 34274, "Ùĩ</w>": 31343, "ÙĪ": 12203, "ÙĪ</w>": 38310, "ÙĪر": 48242, "ÙĬ": 12046, "ÙĬ</w>": 23853, "Ú": 150, "Ú</w>": 406, "Ú©": 26475, "Û": 151, "Û</w>": 407, "Ûģ": 40480, "ÛĮ": 21452, "ÛĮ</w>": 32703, "Ü": 152, "Ü</w>": 408, "Ý": 153, "Ý</w>": 409, "Þ": 154, "Þ</w>": 410, "ß": 155, "ß</w>": 411, "à": 156, "à</w>": 412, "à¤": 3124, "त</w>": 27263, "द</w>": 29552, "न</w>": 26090, "प</w>": 44149, "ब</w>": 43599, "म": 48254, "म</w>": 26774, "य</w>": 37299, "र": 39136, "र</w>": 19052, "ल</w>": 30881, "व</w>": 39545, "श</w>": 43181, "स</w>": 28505, "ह</w>": 29446, "ा": 37973, "ा</w>": 13343, "ि</w>": 26721, "à¤Ĥ</w>": 30833, "à¤ķ</w>": 22067, "à¤Ĺ</w>": 42598, "à¤ľ</w>": 39561, "à¥": 7410, "à¥Ģ": 45791, "à¥Ģ</w>": 25751, "à¥ģ</w>": 39653, "à¥ĩ": 48612, "à¥ĩ</w>": 25130, "à¥ĭ</w>": 34452, "à¥į</w>": 19389, "à¦": 11322, "া</w>": 41532, "à§": 26339, "à¨": 15741, "à©": 32086, "àª": 22990, "à«": 48347, "à¬": 32791, "à®": 6022, "த</w>": 34691, "ன</w>": 43394, "ப</w>": 47388, "à®®</w>": 35463, "à®°</w>": 43270, "ல</w>": 47705, "ா</w>": 32831, "ி</w>": 27126, "à®ķ</w>": 36168, "à®Ł</w>": 45263, "à¯": 11259, "à¯ģ</w>": 33115, "à¯į</w>": 16631, "à°": 12100, "à±": 23550, "à±į</w>": 46098, "à²": 9992, "ಿ</w>": 47797, "à³": 20745, "à³į</w>": 36148, "à´": 15418, "àµ": 27392, "àµį</w>": 45266, "à¶": 29881, "à·": 30766, "à¸": 1777, "ม": 26137, "ม</w>": 29570, "ย": 27241, "ย</w>": 33091, "ร": 32225, "ร</w>": 27331, "ล": 34696, "ล</w>": 32746, "ว": 26990, "ว</w>": 30245, "ส": 37883, "ส</w>": 35737, "ห": 33064, "ะ": 43920, "ะ</w>": 49234, "ั</w>": 14978, "า": 11529, "า</w>": 38476, "าà¸": 12330, "ิ</w>": 17092, "ี": 22421, "ี</w>": 20278, "ีà¹Ī</w>": 31511, "ื</w>": 47991, "ุ</w>": 30524, "ู</w>": 35273, "à¸ģ</w>": 30767, "à¸ģà¸": 31474, "à¸Ħ</w>": 31757, "à¸Ħà¸": 39628, "à¸ĩ": 24603, "à¸ĩ</w>": 33382, "à¸Ī": 47608, "à¸Ĭ</w>": 46324, "à¸Ķ": 31107, "à¸Ķ</w>": 38825, "à¸ķ": 40273, "à¸ķ</w>": 41108, "à¸Ĺ</w>": 36171, "à¸Ļ": 17474, "à¸Ļ</w>": 17639, "à¸Ļà¸": 23121, "à¸ļ": 33859, "à¸ļ</w>": 39616, "à¸ŀ</w>": 48171, "à¸Ń": 13398, "à¸Ń</w>": 32818, "à¸Ńà¸": 14649, "à¸Ńà¸ĩ": 46622, "à¹": 4484, "à¹Ģ": 13729, "à¹Ģà¸": 14076, "à¹ģà¸": 23916, "à¹Ĥ": 33118, "à¹ĥ": 40962, "à¹Ħà¸": 31718, 
"à¹ĩ</w>": 38699, "à¹Ī</w>": 11722, "à¹ī</w>": 13123, "à¹Į</w>": 28353, "à¼": 46186, "à½": 39219, "á": 157, "á</w>": 413, "á´": 19036, "áµ": 17330, "áĢ": 45932, "áĥ": 24829, "áĥ¦</w>": 32193, "â": 158, "â</w>": 414, "â¤": 25087, "⤵ï¸ı</w>": 36026, "â¬": 7930, "â¬ħï¸ı</w>": 42111, "â¬Ĩ": 27718, "â¬Ĩï¸ı</w>": 32798, "â¬ĩ": 10917, "â¬ĩ</w>": 39370, "â¬ĩï¸ı": 25621, "â¬ĩï¸ı</w>": 13984, "â¬ĩï¸ıâ¬ĩï¸ı": 40159, "âĢ": 728, "âĢ¢": 9485, "âĢ¢</w>": 2701, "âĢ¢âĢ¢": 15006, "âĢ¢âĢ¢</w>": 47575, "âĢ¢âĢ¢âĢ¢âĢ¢": 27502, "âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢": 48630, "âĢ¦": 7095, "âĢ¦\"</w>": 20215, "âĢ¦..</w>": 47779, "âĢ¦.</w>": 18615, "âĢ¦/</w>": 29842, "âĢ¦</w>": 959, "âĢ¦âĢ¦</w>": 40066, "âĢ²</w>": 32633, "âĢ³</w>": 25061, "âĢ¼": 6578, "âĢ¼ï¸ı": 15622, "âĢ¼ï¸ı</w>": 8310, "âĢ¼ï¸ıâĢ¼ï¸ı</w>": 33218, "âĢĭ": 17086, "âĢĭ</w>": 9844, "âĢį": 4244, "âĢįâĻ": 5177, "âĢįâĻĢï¸ı": 18897, "âĢįâĻĢï¸ı</w>": 9605, "âĢįâĻĤ": 8832, "âĢįâĻĤï¸ı": 21779, "âĢįâĻĤï¸ı</w>": 10613, "âĢİ</w>": 31001, "âĢIJ</w>": 34512, "âĢĵ": 21070, "âĢĵ</w>": 1224, "âĢĶ": 6718, "âĢĶ</w>": 2005, "âĢĶ></w>": 26341, "âĢĶ@</w>": 28470, "âĢĶâĢĶ": 10037, "âĢĶâĢĶ</w>": 44800, "âĢĶâĢĶâĢĶâĢĶ": 17797, "âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ": 34432, "âĢķ</w>": 14236, "âģ": 1667, "âģ£": 31089, "âģ£</w>": 16845, "âģ¦": 2773, "âģ¦</w>": 34855, "âģ¦@</w>": 2859, "âģ¦âģ¦@</w>": 27783, "âģ©": 20097, "âģ©,</w>": 48749, "âģ©.</w>": 35777, "âģ©</w>": 2918, "âģīï¸ı</w>": 46534, "âģł": 23881, "âģł</w>": 13503, "âģłâģł</w>": 33488, "âĤ": 5227, "âĤ¬": 34919, "âĤ¬</w>": 6309, "âĤ¹</w>": 21777, "âĥ": 2805, "âĥ£": 11250, "âĥ£</w>": 3076, "âĥ£@</w>": 48291, "âĦ": 8604, "âĦ¢": 29438, "âĦ¢</w>": 11675, "âĦ¹</w>": 45462, "âĨ": 6059, "âĨĴ</w>": 7481, "âĨĵ</w>": 41603, "âĩ": 27228, "âĪ": 17788, "âī": 22684, "âīĪ</w>": 45451, "âĮ": 17848, "âĮļ": 31301, "âĮļï¸ı</w>": 35931, "âı": 7960, "âı©</w>": 40847, "âı°</w>": 12714, "âı±</w>": 33149, "âı³</w>": 47617, "âĵ": 27400, "âĶ": 13389, "âĶĢ": 45139, "âĶģ": 42022, "âķ": 17027, "âķIJ": 48039, "âĸ": 4168, "âĸª": 21203, "âĸª</w>": 36628, "âĸªï¸ı</w>": 24974, "âĸ«": 39478, "âĸ¬": 33798, "âĸ¬âĸ¬": 36975, "âĸ¶": 12509, "âĸ¶</w>": 21126, "âĸ¶ï¸ı</w>": 14442, "âĸº": 46061, "âĸº</w>": 12086, "âĸ½": 45634, "âĸł</w>": 36791, "âĹ": 9323, "âĹĨ</w>": 48961, "âĹı</w>": 26999, "âĺ": 1741, "âĺ®</w>": 45851, "âĺ¹": 28811, "âĺ¹ï¸ı</w>": 39605, "âĺº": 5010, "âĺº</w>": 8703, "âĺºâĺº</w>": 46051, "âĺºï¸ı": 11506, "âĺºï¸ı</w>": 7779, "âĺºï¸ıâĺºï¸ı</w>": 41315, "âĺ¼</w>": 38877, "âĺĢ": 32146, "âĺĢ</w>": 22242, "âĺĢï¸ı": 12817, "âĺĢï¸ı</w>": 8219, "âĺĢï¸ıâĺĢï¸ı": 44550, "âĺģ": 25195, "âĺģï¸ı</w>": 35197, "âĺĥ": 38972, "âĺħ": 9339, "âĺħ</w>": 10643, "âĺħâĺħ": 12681, "âĺħâĺħ</w>": 36644, "âĺħâĺħâĺħâĺħ": 34431, "âĺħâĺħâĺħâĺħ</w>": 44034, "âĺħâĺħâĺħâĺħâĺħ</w>": 45984, "âĺĨ": 23941, "âĺĨ</w>": 13439, "âĺİ": 24045, "âĺİ</w>": 45493, "âĺİï¸ı</w>": 27219, "âĺij": 20983, "âĺij</w>": 42300, "âĺijï¸ı</w>": 22291, "âĺĶï¸ı</w>": 31238, "âĺķ": 11454, "âĺķ</w>": 26561, "âĺķï¸ı": 25839, "âĺķï¸ı</w>": 15499, "âĺĺ": 23483, "âĺĺï¸ı</w>": 31454, "âĺĿ": 21982, "âĺĿï¸ı</w>": 38891, "âĺŀ</w>": 31255, "âĺłï¸ı</w>": 34672, "âĻ": 1548, "âĻ¡": 11091, "âĻ¡</w>": 6251, "âĻ¡âĻ¡": 22360, "âĻ¡âĻ¡</w>": 34267, "âĻ¡âĻ¡âĻ¡</w>": 36611, "âĻ¤</w>": 47435, "âĻ¥": 4622, "âĻ¥</w>": 3405, "âĻ¥âĻ¥": 12975, "âĻ¥âĻ¥</w>": 19604, "âĻ¥âĻ¥âĻ¥</w>": 23255, "âĻ¥âĻ¥âĻ¥âĻ¥": 49020, "âĻ¥ï¸ı": 17774, "âĻ¥ï¸ı</w>": 10561, "âĻ¥ï¸ıâĻ¥ï¸ı": 40309, "âĻ¦": 32376, "âĻ¦</w>": 47547, "âĻ©": 30339, "âĻ©âĻ«</w>": 31636, "âĻª": 27364, "âĻª</w>": 12382, "âĻ«": 39217, "âĻ«</w>": 10814, "âĻ¬</w>": 24753, "âĻ»": 39611, "âĻ»ï¸ı</w>": 46075, "âļ": 
2234, "âļ¡": 40098, "âļ¡</w>": 20712, "âļ¡ï¸ı": 19500, "âļ¡ï¸ı</w>": 11605, "âļ¡ï¸ıâļ¡ï¸ı</w>": 45922, "âļª": 11922, "âļª</w>": 36373, "âļªï¸ı": 22251, "âļªï¸ı</w>": 17885, "âļ«": 15374, "âļ«ï¸ı": 26529, "âļ«ï¸ı</w>": 24649, "âļ½": 4867, "âļ½</w>": 13173, "âļ½âļ½": 43259, "âļ½ï¸ı": 11342, "âļ½ï¸ı</w>": 6768, "âļ½ï¸ıâļ½ï¸ı": 30358, "âļ½ï¸ıâļ½ï¸ı</w>": 44148, "âļ¾": 11314, "âļ¾</w>": 34717, "âļ¾ï¸ı": 24727, "âļ¾ï¸ı</w>": 14858, "âļĵ": 23522, "âļĵï¸ı</w>": 35299, "âļĶï¸ı</w>": 29361, "âļľ": 47491, "âļł</w>": 39203, "âļłï¸ı": 40966, "âļłï¸ı</w>": 15596, "âĽ": 7956, "âĽ³ï¸ı</w>": 29204, "âĽĦ": 30668, "âĽĦï¸ı</w>": 45465, "âľ": 1508, "⾨": 7181, "⾨</w>": 3531, "⾨⾨": 35174, "⾨⾨</w>": 21985, "⾨⾨⾨</w>": 39424, "âľĤ": 38602, "âľħ": 29544, "âľħ</w>": 5564, "âľĪ": 10682, "âľĪ</w>": 30712, "âľĪï¸ı": 26176, "âľĪï¸ı</w>": 13413, "âľĬ": 12392, "âľĬ</w>": 17819, "âľĬðŁı½</w>": 48547, "âľĬðŁı¾</w>": 41185, "âľĭ": 39383, "âľĭ</w>": 30239, "âľĮ": 6419, "âľĮ</w>": 12656, "âľĮï¸ı": 21906, "âľĮï¸ı</w>": 12239, "âľĮðŁı»</w>": 30538, "âľĮðŁı¼</w>": 30588, "âľį": 20872, "âľįï¸ı</w>": 30888, "âľı": 32574, "âľıï¸ı</w>": 40724, "âľĵ</w>": 36700, "âľĶ": 47200, "âľĶ</w>": 13749, "âľĶï¸ı": 40544, "âľĶï¸ı</w>": 9191, "âľĸï¸ı</w>": 44133, "âľĿ": 42220, "âĿ": 1045, "âĿ£": 37007, "âĿ£</w>": 25623, "âĿ£ï¸ı</w>": 25240, "âĿ¤": 1266, "âĿ¤</w>": 2720, "âĿ¤âĿ¤": 9033, "âĿ¤âĿ¤</w>": 14058, "âĿ¤âĿ¤âĿ¤</w>": 16708, "âĿ¤âĿ¤âĿ¤âĿ¤": 37918, "âĿ¤âĿ¤âĿ¤âĿ¤</w>": 43970, "âĿ¤ï¸ı": 2626, "âĿ¤ï¸ı "âĿ¤ï¸ı.</w>": 45326, "âĿ¤ï¸ı</w>": 1752, "âĿ¤ï¸ı@</w>": 31187, "âĿ¤ï¸ıâĿ¤ï¸ı": 6713, "âĿ¤ï¸ıâĿ¤ï¸ı</w>": 10363, "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı</w>": 12282, "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 39167, "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı</w>": 29880, "âĿ¤ï¸ıðŁĴĻ</w>": 37380, "âĿ¤ï¸ıðŁĺį</w>": 37272, "âĿ¤ï¸ıðŁĺĺ</w>": 41800, "âĿ¤ðŁĺį</w>": 49120, "âĿ¥</w>": 36914, "âĿĦ": 8501, "âĿĦ</w>": 30494, "âĿĦï¸ı": 16834, "âĿĦï¸ı</w>": 12402, "âĿĦï¸ıâĿĦï¸ı": 41626, "âĿĮ": 44485, "âĿĮ</w>": 17975, "âĿĵ</w>": 29791, "âĿĹ": 12868, "âĿĹ</w>": 29079, "âĿĹï¸ı": 28642, "âĿĹï¸ı</w>": 17391, "âĿĿ</w>": 46951, "âŀ": 3257, "âŀ¡</w>": 12854, "âŀ¡ï¸ı": 31860, "âŀ¡ï¸ı</w>": 4956, "âŀ¤</w>": 18651, "âŀķ</w>": 46526, "âŀĸ": 21327, "âŀĸ</w>": 34902, "âŀĸâŀĸ": 23316, "âŀĸâŀĸâŀĸâŀĸ": 40401, "âŀľ</w>": 23775, "âł": 5689, "âłĢ": 9691, "âłĢ</w>": 8621, "âłĢâłĢ": 11466, "âłĢâłĢ</w>": 39092, "âłĢâłĢâłĢâłĢ": 20976, "âłĢâłĢâłĢâłĢâłĢâłĢâłĢâłĢ": 46063, "âŃ": 5527, "âŃIJ": 6410, "âŃIJ</w>": 19012, "âŃIJâŃIJ": 32663, "âŃIJï¸ı": 12427, "âŃIJï¸ı</w>": 10251, "âŃIJï¸ıâŃIJï¸ı": 18640, "âŃIJï¸ıâŃIJï¸ıâŃIJï¸ı</w>": 40746, "ã": 159, "ã</w>": 415, "ãĢ": 4092, "ãĢģ</w>": 45262, "ãĢĤ": 38060, "ãĢĤ</w>": 38000, "ãĢĬ</w>": 39920, "ãĢĭ</w>": 32898, "ãĢĮ</w>": 18116, "ãĢį</w>": 19149, "ãĢİ</w>": 26947, "ãĢı</w>": 30293, "ãĢIJ</w>": 12534, "ãĢij</w>": 12990, "ãĢľ</w>": 39581, "ãģ": 4813, "ãģ¦": 48029, "ãģ¨": 34671, "ãģ¨ç¹ĭãģ": 47310, "ãģ¨ç¹ĭãģĮãĤĬãģŁãģĦ</w>": 48290, "ãģª": 29104, "ãģ®": 21575, "ãģ·": 44130, "ãģĦ": 33523, "ãģĦ</w>": 38850, "ãģĨ": 44235, "ãģį": 42184, "ãĤ": 3909, "ãĤ¢": 26560, "ãĤ¤": 19319, "ãĤ¤ãĥ": 36294, "ãĤ«": 37367, "ãĤ¯": 31574, "ãĤ·": 37665, "ãĤ¸": 32234, "ãĤ¸ãĥ": 43491, "ãĤ¹": 22694, "ãĤ¹</w>": 39220, "ãĤ¹ãĥ": 32421, "ãĤ¿": 34941, "ãĤĬãģ": 40500, "ãĤĮ</w>": 45211, "ãĤŃ": 47121, "ãĥ": 2429, "ãĥ©": 23007, "ãĥª": 32115, "ãĥ«": 33257, "ãĥ¬": 32965, "ãĥ³": 17671, "ãĥ³</w>": 26875, "ãĥ³ãĤ": 45105, "ãĥ³ãĥ": 25914, "ãĥ»": 8415, "ãĥ»</w>": 11158, "ãĥ»ãĥ»": 13949, "ãĥ»ãĥ»ãĥ»</w>": 14234, "ãĥ¼": 13457, "ãĥ¼</w>": 30391, "ãĥ¼ãĥ": 18584, "ãĥĥ": 28902, "ãĥĦ</w>": 32173, "ãĥĪ": 42384, "ãĥİ</w>": 39967, "ãĥķãĤ": 33371, "ãĥŀ": 
48924, "ãĥŃ": 35827, "ãħ": 5947, "ãħ¤": 21096, "ãħ¤ãħ¤": 22583, "ãħ¤ãħ¤ãħ¤ãħ¤": 39329, "ãħĭ": 13052, "ãħĭ</w>": 25108, "ãħĭãħĭ": 16604, "ãħĭãħĭ</w>": 42581, "ãħĭãħĭãħĭ</w>": 46407, "ãħĭãħĭãħĭãħĭ": 39362, "ãħł": 16089, "ãħł</w>": 25781, "ãħłãħł": 22021, "ãħłãħł</w>": 34398, "ãħłãħłãħłãħł": 47028, "ä": 160, "ä</w>": 416, "ä¸": 19759, "ä¹": 41854, "äº": 21078, "人": 36839, "ä»": 37743, "ä½": 47466, "å": 161, "å</w>": 417, "å¤": 23170, "å¥": 29290, "å®": 27047, "å°": 34720, "å±": 46096, "å¸": 42021, "å¹": 38780, "åħ": 34314, "åĨ": 27972, "åĨĻ": 44653, "åĪ": 42748, "åĭ": 47505, "åı": 34517, "åIJ": 41673, "åĽ": 39027, "åľ": 37746, "åŃ": 35751, "æ": 162, "æ</w>": 418, "æĸ": 29032, "æĹ": 22265, "æĹ¥": 39121, "æĹ¥</w>": 37156, "æĺ": 42891, "æĻ": 48132, "æľ": 19277, "æľ¬": 44353, "æĿ": 27667, "æĿ±": 48338, "ç": 163, "ç</w>": 419, "ç¥": 26369, "ç¥Ń</w>": 42557, "çµ": 37810, "ç¹": 43431, "ç¹ĭãģ": 45930, "çĶ": 20211, "çĶŁ": 33375, "çľ": 33440, "羣": 41570, "è": 164, "è</w>": 420, "èª": 34002, "èªķ": 41293, "é": 165, "é</w>": 421, "éģ": 44854, "éĩ": 38283, "ê": 166, "ê</w>": 422, "ê°": 21122, "ê°ĵ": 41076, "ê°ĵìĦ¸ë¸IJ</w>": 41689, "ê°ķ": 45758, "ê²": 35555, "ê³": 36216, "êµ": 31871, "ê·": 42680, "ê¸": 32495, "ê¹": 24531, "ê¹Ģ": 25203, "ë": 167, "ë</w>": 423, "ë¦": 24621, "리": 47649, "ë§": 28024, "ë§Ī": 40027, "ëª": 36311, "ë¯": 19528, "민": 34442, "민</w>": 44632, "ë°": 15810, "ë°©": 23273, "ë°©íĥ": 25081, "ë°©íĥĦ": 25641, "ë°©íĥĦìĨĮëħĦëĭ": 26068, "ë°©íĥĦìĨĮëħĦëĭ¨</w>": 27129, "ë°ķ": 40988, "ë²": 48267, "ë³": 44693, "ë¹": 24193, "ëĤ": 27252, "ëĤĺ": 48484, "ëĭ": 13094, "ëĭ¤": 46680, "ëĭĪ": 33708, "ëį": 45543, "ëı": 31972, "ëĵ": 30850, "ëĿ": 44317, "ì": 168, "ì</w>": 424, "ì£": 39856, "주": 45161, "ì¤": 31153, "ì§": 16279, "ì§Ģ": 28836, "ì§Ħ</w>": 38890, "ì°": 40742, "ì¶": 42476, "ì¶ķ": 46403, "ì¶ķíķĺ": 47866, "ì¹": 45088, "ìĤ": 31061, "ìĥ": 30587, "ìĥĿ": 47858, "ìĦ": 15074, "ìĦ¸ë": 29254, "ìĦ¸ë¸": 29658, "ìĦ¸ë¸IJ</w>": 41415, "ìĨ": 15115, "ìĨĮë": 20515, "ìĨĮëħ": 21391, "ìĨĮëħĦëĭ": 25887, "ìĪ": 32757, "ìĬ": 12125, "ìĬ¤": 20305, "ìĬ¤</w>": 23829, "ìĭ": 23924, "ìķ": 16071, "ìķĦ": 23233, "ìĸ": 31625, "ìĹ": 13252, "ìĹIJ": 37622, "ìĹij": 31036, "ìĹijìĨ": 42763, "ìĹijìĨĮ</w>": 45606, "ìĺ": 21144, "ìĻ": 39405, "ìļ": 18541, "ìļ°": 38415, "ìļ°</w>": 49344, "ìĽ": 22543, "ìĽIJ</w>": 36495, "ìľ": 20909, "ìľł": 42890, "ìĿ": 8276, "ìĿ´": 12286, "ìĿ´</w>": 34746, "ìĿ´ì": 37590, "ìĿ¼": 43406, "ìŀ": 20849, "ìł": 20580, "ìłķ": 34725, "í": 169, "í</w>": 425, "íģ": 35641, "íģ¬</w>": 45832, "íĤ": 43565, "íĥ": 15012, "íĥĢ": 41126, "íĥľ": 37663, "íĬ": 23215, "íĬ¸": 48974, "íĬ¸</w>": 39820, "íĭ": 34350, "íĶ": 29450, "íķ": 15197, "íķ´</w>": 35286, "íķĺ": 33992, "íĺ": 15962, "íĺ¸</w>": 39657, "íĺĦ</w>": 34645, "íĻ": 31882, "î": 170, "î</w>": 426, "îĢ": 36288, "îĦ": 35368, "îĮ": 41006, "îIJ": 16929, "îIJĴ": 40100, "ï": 171, "ï</w>": 427, "ï¸": 842, "ï¸İ</w>": 24029, "ï¸ı": 1392, "ï¸ı "ï¸ı:</w>": 32604, "ï¸ı</w>": 1001, "ï¸ı@</w>": 34600, "ï¸ıâĥ£": 17394, "ï¸ıâĥ£-</w>": 40376, "ï¸ıâĥ£</w>": 4603, "ï¿": 27850, "�": 47356, "�</w>": 39802, "ð": 172, "ð</w>": 428, "ðĿ": 6874, "ðĿIJ": 15889, "ðĿij": 43794, "ðĿĴ": 43387, "ðĿĵ": 47110, "ðĿĹ": 18865, "ðĿĺ": 26109, "ðĿĻ": 29415, "ðŁ": 558, "ðŁ¤": 1793, "ðŁ¤£": 9665, "ðŁ¤£</w>": 9909, "ðŁ¤£ðŁ¤£": 16430, "ðŁ¤£ðŁ¤£</w>": 31009, "ðŁ¤£ðŁ¤£ðŁ¤£</w>": 32262, "ðŁ¤¤": 39550, "ðŁ¤¤</w>": 26759, "ðŁ¤¦": 17186, "ðŁ¤§</w>": 40983, "ðŁ¤©": 27351, "ðŁ¤©</w>": 16074, "ðŁ¤ª": 44230, "ðŁ¤ª</w>": 24920, "ðŁ¤«</w>": 47671, "ðŁ¤¯</w>": 37595, "ðŁ¤·": 13185, "ðŁ¤·ðŁı»âĢįâĻĢï¸ı</w>": 46770, "ðŁ¤ij</w>": 34801, "ðŁ¤ĵ": 
36580, "ðŁ¤ĵ</w>": 18928, "ðŁ¤Ķ": 12706, "ðŁ¤Ķ</w>": 6497, "ðŁ¤ĶðŁ¤Ķ</w>": 28490, "ðŁ¤ĶðŁ¤ĶðŁ¤Ķ</w>": 43361, "ðŁ¤ĸ</w>": 46146, "ðŁ¤Ĺ": 16646, "ðŁ¤Ĺ</w>": 10465, "ðŁ¤ĹðŁ¤Ĺ</w>": 44321, "ðŁ¤ĺ": 10623, "ðŁ¤ĺ</w>": 17288, "ðŁ¤ĺðŁı»": 46449, "ðŁ¤ĺðŁı»</w>": 30891, "ðŁ¤ĺðŁı¼</w>": 31458, "ðŁ¤ĺðŁı½</w>": 49362, "ðŁ¤Ļ": 23800, "ðŁ¤Ļ</w>": 39101, "ðŁ¤Ŀ</w>": 35242, "ðŁ¤ŀ": 29463, "ðŁ¤ŀ</w>": 38597, "ðŁ¤Ł": 48509, "ðŁ¤ł</w>": 36737, "ðŁ¤Ń</w>": 47289, "ðŁ¥": 4156, "ðŁ¥°": 29246, "ðŁ¥°</w>": 17597, "ðŁ¥³": 45823, "ðŁ¥³</w>": 28055, "ðŁ¥º": 43380, "ðŁ¥º</w>": 36858, "ðŁ¥Ĥ": 43805, "ðŁ¥Ĥ</w>": 25212, "ðŁ¥ĥ</w>": 47790, "ðŁ¥ĩ": 34372, "ðŁ¥ĩ</w>": 20069, "ðŁ¥Ī</w>": 35858, "ðŁ¥ī</w>": 36782, "ðŁ¥Ĭ</w>": 29275, "ðŁ¦": 6040, "ðŁ¦ģ": 36367, "ðŁ¦ģ</w>": 26056, "ðŁ¦ĥ</w>": 40184, "ðŁ¦Ħ</w>": 37659, "ðŁ¦ħ</w>": 28800, "ðŁ¦Ī</w>": 48984, "ðŁ¦ĭ": 49325, "ðŁ¦ĭ</w>": 28985, "ðŁ§": 8792, "ðŁ§¡": 30996, "ðŁ§¡</w>": 24578, "ðŁ§IJ</w>": 33549, "ðŁħ": 22010, "ðŁĨ": 9536, "ðŁĨķ</w>": 34956, "ðŁĨĺ</w>": 39868, "ðŁĨļ</w>": 16325, "ðŁĩ": 1173, "ðŁĩ¦": 12469, "ðŁĩ¦</w>": 28565, "ðŁĩ¦ðŁĩ": 33196, "ðŁĩ¦ðŁĩ·</w>": 41629, "ðŁĩ¦ðŁĩº</w>": 25192, "ðŁĩ§": 14660, "ðŁĩ§ðŁĩ": 37342, "ðŁĩ§ðŁĩª</w>": 38794, "ðŁĩ§ðŁĩ·</w>": 28182, "ðŁĩ¨": 8889, "ðŁĩ¨ðŁĩ": 8989, "ðŁĩ¨ðŁĩ¦": 34324, "ðŁĩ¨ðŁĩ¦</w>": 16364, "ðŁĩ¨ðŁĩ³</w>": 36819, "ðŁĩ¨ðŁĩŃ</w>": 41119, "ðŁĩ©": 15222, "ðŁĩ©ðŁĩ": 36350, "ðŁĩ©ðŁĩª</w>": 21531, "ðŁĩª": 11428, "ðŁĩª</w>": 12331, "ðŁĩªðŁĩ": 13917, "ðŁĩªðŁĩ¸</w>": 22177, "ðŁĩªðŁĩº</w>": 34655, "ðŁĩ«": 12977, "ðŁĩ«ðŁĩ·": 39109, "ðŁĩ«ðŁĩ·</w>": 16223, "ðŁĩ¬": 8129, "ðŁĩ¬ðŁĩ": 8354, "ðŁĩ¬ðŁĩ§": 23762, "ðŁĩ¬ðŁĩ§</w>": 11559, "ðŁĩ®": 8268, "ðŁĩ®ðŁĩ": 8347, "ðŁĩ®ðŁĩª</w>": 34148, "ðŁĩ®ðŁĩ³": 47299, "ðŁĩ®ðŁĩ³</w>": 23602, "ðŁĩ®ðŁĩ¹": 42034, "ðŁĩ®ðŁĩ¹</w>": 17070, "ðŁĩ¯": 20090, "ðŁĩ¯ðŁĩ": 22924, "ðŁĩ¯ðŁĩµ</w>": 26527, "ðŁĩ°": 28232, "ðŁĩ±": 29533, "ðŁĩ±ðŁĩ": 40941, "ðŁĩ²": 16411, "ðŁĩ²ðŁĩ": 17562, "ðŁĩ²ðŁĩ½</w>": 32073, "ðŁĩ³": 16645, "ðŁĩ³ðŁĩ": 17747, "ðŁĩ³ðŁĩ±</w>": 36747, "ðŁĩµ": 12127, "ðŁĩµðŁĩ": 13608, "ðŁĩµðŁĩ°</w>": 37764, "ðŁĩµðŁĩ¹</w>": 42621, "ðŁĩµðŁĩŃ</w>": 42777, "ðŁĩ·": 16026, "ðŁĩ·</w>": 9869, "ðŁĩ·ðŁĩº</w>": 37902, "ðŁĩ¸": 19447, "ðŁĩ¸ðŁĩ": 33325, "ðŁĩ¸ðŁĩª</w>": 39260, "ðŁĩ¹": 21810, "ðŁĩ¹ðŁĩ": 36250, "ðŁĩº": 4054, "ðŁĩº</w>": 17467, "ðŁĩºðŁĩ": 4131, "ðŁĩºðŁĩ¸": 8907, "ðŁĩºðŁĩ¸</w>": 5688, "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 18739, "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸</w>": 41411, "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸</w>": 43357, "ðŁĩ¿": 25520, "ðŁĩ¿ðŁĩ¦</w>": 36982, "ðŁĩŃ": 30370, "ðŁĮ": 1576, "ðŁĮ±": 35318, "ðŁĮ±</w>": 20665, "ðŁĮ²": 34071, "ðŁĮ²</w>": 28154, "ðŁĮ³": 44265, "ðŁĮ³</w>": 28543, "ðŁĮ´": 20643, "ðŁĮ´</w>": 15968, "ðŁĮµ</w>": 40871, "ðŁĮ·": 32328, "ðŁĮ·</w>": 24259, "ðŁĮ¸": 16314, "ðŁĮ¸</w>": 10980, "ðŁĮ¸ðŁĮ¸": 46210, "ðŁĮ¹": 14990, "ðŁĮ¹</w>": 10662, "ðŁĮ¹ðŁĮ¹": 37933, "ðŁĮº": 27608, "ðŁĮº</w>": 19829, "ðŁĮ»": 27196, "ðŁĮ»</w>": 19772, "ðŁĮ¼": 36484, "ðŁĮ¼</w>": 26312, "ðŁĮ¾</w>": 39796, "ðŁĮ¿": 27736, "ðŁĮ¿</w>": 18588, "ðŁĮĢ</w>": 34348, "ðŁĮħ</w>": 27547, "ðŁĮĪ": 23038, "ðŁĮĪ</w>": 13042, "ðŁĮĬ": 20465, "ðŁĮĬ</w>": 14302, "ðŁĮĮ</w>": 43393, "ðŁĮį": 34931, "ðŁĮį</w>": 18641, "ðŁĮİ": 31125, "ðŁĮİ</w>": 16969, "ðŁĮı</w>": 31527, "ðŁĮIJ</w>": 33071, "ðŁĮĻ": 42330, "ðŁĮĻ</w>": 23283, "ðŁĮļ": 49004, "ðŁĮļ</w>": 27877, "ðŁĮŀ": 21152, "ðŁĮŀ</w>": 12980, "ðŁĮŁ": 13196, "ðŁĮŁ</w>": 8542, "ðŁĮŁðŁĮŁ": 26014, "ðŁį": 2011, "ðŁį¦": 47375, "ðŁį¦</w>": 32032, "ðŁį©</w>": 38379, "ðŁįª</w>": 38958, "ðŁį«": 47994, "ðŁį«</w>": 33401, "ðŁį°": 43732, "ðŁį°</w>": 30051, "ðŁį³</w>": 37441, "ðŁį´": 41531, "ðŁį´</w>": 25338, "ðŁį·": 24445, "ðŁį·</w>": 18072, "ðŁį¸": 43058, 
"ðŁį¸</w>": 31217, "ðŁį¹</w>": 35598, "ðŁįº": 31081, "ðŁįº</w>": 21590, "ðŁį»": 22793, "ðŁį»</w>": 13167, "ðŁį¾": 27294, "ðŁį¾</w>": 21656, "ðŁįĢ": 22865, "ðŁįĢ</w>": 15764, "ðŁįģ": 29837, "ðŁįģ</w>": 23075, "ðŁįĤ": 35015, "ðŁįĤ</w>": 25721, "ðŁįĥ": 27157, "ðŁįĥ</w>": 20147, "ðŁįĩ</w>": 48697, "ðŁįĬ": 35001, "ðŁįĬ</w>": 28036, "ðŁįĭ</w>": 39543, "ðŁįĮ</w>": 44987, "ðŁįį</w>": 48946, "ðŁįİ</w>": 32069, "ðŁįij</w>": 32889, "ðŁįĴ</w>": 33160, "ðŁįĵ": 44739, "ðŁįĵ</w>": 33456, "ðŁįĶ": 46415, "ðŁįĶ</w>": 36031, "ðŁįķ": 31469, "ðŁįķ</w>": 23904, "ðŁįŃ</w>": 42100, "ðŁİ": 1165, "ðŁİ£</w>": 43158, "ðŁİ¤": 23490, "ðŁİ¤</w>": 15690, "ðŁİ¥": 22186, "ðŁİ¥:</w>": 43640, "ðŁİ¥</w>": 13233, "ðŁİ§": 31254, "ðŁİ§</w>": 14266, "ðŁİ¨": 31953, "ðŁİ¨</w>": 13461, "ðŁİ©</w>": 37701, "ðŁİ«</w>": 30331, "ðŁİ¬": 36020, "ðŁİ¬</w>": 18150, "ðŁİ®</w>": 29312, "ðŁİ¯</w>": 23114, "ðŁİµ": 27435, "ðŁİµ</w>": 14946, "ðŁİ¶": 11755, "ðŁİ¶</w>": 6011, "ðŁİ¶ðŁİ¶</w>": 36283, "ðŁİ¸": 29135, "ðŁİ¸</w>": 22122, "ðŁİ¹</w>": 43493, "ðŁİ¼": 34949, "ðŁİ¼</w>": 23757, "ðŁİ¾": 41982, "ðŁİ¾</w>": 24222, "ðŁİĢ": 34347, "ðŁİĢ</w>": 20151, "ðŁİģ": 18368, "ðŁİģ</w>": 13462, "ðŁİĤ": 13026, "ðŁİĤ</w>": 10392, "ðŁİĤðŁİĤ": 39338, "ðŁİĥ": 22622, "ðŁİĥ</w>": 16780, "ðŁİĦ": 12942, "ðŁİĦ</w>": 11267, "ðŁİħ": 17685, "ðŁİħ</w>": 24276, "ðŁİĨ</w>": 39222, "ðŁİĪ": 16142, "ðŁİĪ</w>": 14448, "ðŁİĪðŁİī</w>": 48049, "ðŁİī": 4310, "ðŁİī:</w>": 17310, "ðŁİī</w>": 3986, "ðŁİīðŁİ": 11473, "ðŁİīðŁİĪ": 40499, "ðŁİīðŁİĪ</w>": 34008, "ðŁİīðŁİī": 25159, "ðŁİīðŁİī</w>": 13450, "ðŁİīðŁİīðŁİī</w>": 20828, "ðŁİīðŁİĬ": 31662, "ðŁİīðŁİĬ</w>": 30781, "ðŁİĬ": 22763, "ðŁİĬ</w>": 22425, "ðŁİĬðŁİī</w>": 48801, "ðŁİĵ": 28916, "ðŁİĵ</w>": 18744, "ðŁİĻ": 29001, "ðŁİĻ</w>": 29753, "ðŁİĻï¸ı</w>": 44205, "ðŁİŁ": 19248, "ðŁİŁ</w>": 21107, "ðŁİŁï¸ı</w>": 30243, "ðŁİŃ</w>": 28856, "ðŁı": 1109, "ðŁı¡</w>": 27318, "ðŁı³ï¸ı": 26844, "ðŁı³ï¸ıâĢį": 27093, "ðŁı³ï¸ıâĢįðŁĮĪ</w>": 32610, "ðŁı´": 39690, "ðŁı´</w>": 19704, "ðŁı»": 5042, "ðŁı»</w>": 3702, "ðŁı»âĢį": 46250, "ðŁı»âĢįâĻĢï¸ı": 48391, "ðŁı»âĢįâĻĢï¸ı</w>": 23595, "ðŁı»âĢįâĻĤï¸ı</w>": 30984, "ðŁı¼": 6193, "ðŁı¼</w>": 4027, "ðŁı¼âĢįâĻĢï¸ı</w>": 28955, "ðŁı½": 8514, "ðŁı½</w>": 6114, "ðŁı½âĢįâĻĢï¸ı</w>": 37036, "ðŁı½âĢįâĻĤï¸ı</w>": 43157, "ðŁı¾": 10230, "ðŁı¾</w>": 7778, "ðŁı¾âĢįâĻĤï¸ı</w>": 47189, "ðŁı¿": 29854, "ðŁı¿</w>": 21094, "ðŁıĢ": 13708, "ðŁıĢ</w>": 8813, "ðŁıĢðŁıĢ": 43169, "ðŁıģ": 29423, "ðŁıģ</w>": 17473, "ðŁıĥ": 16820, "ðŁıĥ</w>": 32751, "ðŁıħ</w>": 25500, "ðŁıĨ": 9585, "ðŁıĨ</w>": 5596, "ðŁıĨðŁıĨ": 18946, "ðŁıĨðŁıĨ</w>": 38269, "ðŁıĨðŁıĨðŁıĨ</w>": 44484, "ðŁıĩ": 45789, "ðŁıĩ</w>": 40288, "ðŁıĪ": 16144, "ðŁıĪ</w>": 10477, "ðŁıī</w>": 26020, "ðŁıĬ": 33061, "ðŁıĬ</w>": 47830, "ðŁıĮ": 41116, "ðŁıı</w>": 32460, "ðŁıIJ": 46334, "ðŁıIJ</w>": 29433, "ðŁıĴ</w>": 37756, "ðŁıŁ": 35914, "ðŁıŁ</w>": 26472, "ðŁıŁï¸ı</w>": 42627, "ðŁıł</w>": 33727, "ðŁIJ": 2074, "ðŁIJ¢</w>": 37049, "ðŁIJ£</w>": 39597, "ðŁIJ¥</w>": 42981, "ðŁIJ¦</w>": 37260, "ðŁIJ¬</w>": 44238, "ðŁIJ¯": 34825, "ðŁIJ¯</w>": 26111, "ðŁIJ°": 35378, "ðŁIJ°</w>": 25050, "ðŁIJ±": 35710, "ðŁIJ±</w>": 22979, "ðŁIJ´</w>": 33509, "ðŁIJ¶": 14466, "ðŁIJ¶</w>": 10631, "ðŁIJ·</w>": 38408, "ðŁIJ¸": 45597, "ðŁIJ¸</w>": 40298, "ðŁIJº": 44281, "ðŁIJº</w>": 31445, "ðŁIJ»": 30750, "ðŁIJ»</w>": 25322, "ðŁIJ¼</w>": 46234, "ðŁIJ¾": 16057, "ðŁIJ¾</w>": 11317, "ðŁIJ¾ðŁIJ¾</w>": 42202, "ðŁIJī</w>": 46908, "ðŁIJĬ</w>": 43974, "ðŁIJį": 48903, "ðŁIJį</w>": 30177, "ðŁIJİ": 48281, "ðŁIJİ</w>": 32726, "ðŁIJIJ": 47735, "ðŁIJIJ</w>": 27954, "ðŁIJij</w>": 49389, "ðŁIJķ</w>": 41069, "ðŁIJĺ</w>": 38733, 
"ðŁIJĿ": 30619, "ðŁIJĿ</w>": 20111, "ðŁIJŁ": 42084, "ðŁIJŁ</w>": 29989, "ðŁIJł</w>": 42725, "ðŁij": 964, "ðŁij£</w>": 39755, "ðŁij§": 48938, "ðŁij¨": 18966, "ðŁij¨âĢį": 25023, "ðŁij©": 18800, "ðŁij©âĢį": 26304, "ðŁij«": 47106, "ðŁij«</w>": 35457, "ðŁij®": 42686, "ðŁij¯": 25910, "ðŁij¯</w>": 20582, "ðŁij¶": 26187, "ðŁij¶</w>": 33189, "ðŁij¸": 26268, "ðŁij¸</w>": 36645, "ðŁij¹</w>": 46766, "ðŁij»": 24625, "ðŁij»</w>": 16243, "ðŁij¼": 25270, "ðŁij¼</w>": 31083, "ðŁij½": 42677, "ðŁij½</w>": 26257, "ðŁijĢ": 11524, "ðŁijĢ</w>": 5908, "ðŁijĢðŁijĢ</w>": 31561, "ðŁijģ": 47796, "ðŁijģ</w>": 45705, "ðŁijĦ</w>": 47445, "ðŁijħ": 31833, "ðŁijħ</w>": 24672, "ðŁijĨ": 42975, "ðŁijĨ</w>": 45194, "ðŁijĩ": 7662, "ðŁijĩ</w>": 7475, "ðŁijĩðŁı»": 45811, "ðŁijĩðŁı»</w>": 32813, "ðŁijĩðŁı¼</w>": 37504, "ðŁijĩðŁijĩ": 17915, "ðŁijĩðŁijĩ</w>": 31891, "ðŁijĩðŁijĩðŁijĩ</w>": 35627, "ðŁijĪ": 32794, "ðŁijĪ</w>": 20832, "ðŁijī": 9477, "ðŁijī</w>": 3988, "ðŁijīðŁı»</w>": 23481, "ðŁijīðŁı¼</w>": 27534, "ðŁijīðŁı½</w>": 38059, "ðŁijīðŁijī</w>": 41480, "ðŁijĬ": 8897, "ðŁijĬ</w>": 9704, "ðŁijĬðŁı»": 47393, "ðŁijĬðŁı»</w>": 29152, "ðŁijĬðŁı¼": 49000, "ðŁijĬðŁı¼</w>": 30115, "ðŁijĬðŁijĬ</w>": 46521, "ðŁijĭ": 19351, "ðŁijĭ</w>": 17686, "ðŁijĮ": 4890, "ðŁijĮ</w>": 4494, "ðŁijĮðŁı»": 31818, "ðŁijĮðŁı»</w>": 18606, "ðŁijĮðŁı¼": 37655, "ðŁijĮðŁı¼</w>": 20031, "ðŁijĮðŁı½</w>": 35834, "ðŁijĮðŁijĮ": 36139, "ðŁijĮðŁijĮ</w>": 21435, "ðŁijĮðŁijĮðŁijĮ</w>": 40876, "ðŁijį": 4686, "ðŁijį</w>": 4201, "ðŁijįðŁı»": 25803, "ðŁijįðŁı»</w>": 15129, "ðŁijįðŁı¼": 37285, "ðŁijįðŁı¼</w>": 19689, "ðŁijįðŁı½</w>": 43722, "ðŁijįðŁijį": 33012, "ðŁijįðŁijį</w>": 18997, "ðŁijįðŁijįðŁijį</w>": 37284, "ðŁijİ": 39702, "ðŁijİ</w>": 32568, "ðŁijı": 3802, "ðŁijı</w>": 4829, "ðŁijıðŁı»": 19236, "ðŁijıðŁı»</w>": 17029, "ðŁijıðŁı»ðŁijıðŁı»": 35254, "ðŁijıðŁı¼": 24496, "ðŁijıðŁı¼</w>": 19979, "ðŁijıðŁı¼ðŁijıðŁı¼</w>": 46712, "ðŁijıðŁı½": 40796, "ðŁijıðŁı½</w>": 33978, "ðŁijıðŁı¾</w>": 45450, "ðŁijıðŁijı": 10356, "ðŁijıðŁijı</w>": 16706, "ðŁijıðŁijıðŁijı</w>": 17254, "ðŁijIJ</w>": 40877, "ðŁijij": 14955, "ðŁijij</w>": 8717, "ðŁijijðŁijij": 48532, "ðŁijķ</w>": 47865, "ðŁijŁ</w>": 41183, "ðŁijł</w>": 41264, "ðŁijŃ": 34175, "ðŁijŃ</w>": 27943, "ðŁĴ": 837, "ðŁĴ¡</w>": 24081, "ðŁĴ£": 36862, "ðŁĴ£</w>": 29006, "ðŁĴ¤": 34706, "ðŁĴ¤</w>": 25632, "ðŁĴ¥": 12209, "ðŁĴ¥</w>": 7347, "ðŁĴ¥ðŁĴ¥": 27396, "ðŁĴ¥ðŁĴ¥</w>": 39246, "ðŁĴ¥ðŁĴ¥ðŁĴ¥</w>": 48890, "ðŁĴ¦": 21180, "ðŁĴ¦</w>": 14060, "ðŁĴ¦ðŁĴ¦</w>": 44469, "ðŁĴ§</w>": 34095, "ðŁĴ¨": 27408, "ðŁĴ¨</w>": 17891, "ðŁĴ©": 48621, "ðŁĴ©</w>": 28847, "ðŁĴª": 5475, "ðŁĴª</w>": 6440, "ðŁĴªðŁı»": 31669, "ðŁĴªðŁı»</w>": 21903, "ðŁĴªðŁı¼": 32041, "ðŁĴªðŁı¼</w>": 20759, "ðŁĴªðŁı½": 46380, "ðŁĴªðŁı½</w>": 31111, "ðŁĴªðŁı¾</w>": 39398, "ðŁĴªðŁĴª</w>": 24747, "ðŁĴªðŁĴªðŁĴª</w>": 39913, "ðŁĴ«": 25770, "ðŁĴ«</w>": 12526, "ðŁĴ¬</w>": 30947, "ðŁĴ¯": 10611, "ðŁĴ¯</w>": 7018, "ðŁĴ¯ðŁĴ¯": 30234, "ðŁĴ¯ðŁĴ¯</w>": 44070, "ðŁĴ°": 20454, "ðŁĴ°</w>": 14078, "ðŁĴ°ðŁĴ°": 41747, "ðŁĴµ": 47412, "ðŁĴµ</w>": 38041, "ðŁĴ¸": 37696, "ðŁĴ¸</w>": 25957, "ðŁĴ»": 33433, "ðŁĴ»</w>": 18135, "ðŁĴ¿</w>": 39541, "ðŁĴĢ": 14888, "ðŁĴĢ</w>": 12158, "ðŁĴĢðŁĴĢ": 30884, "ðŁĴģ": 13997, "ðŁĴģ</w>": 14392, "ðŁĴĥ": 9947, "ðŁĴĥ</w>": 14333, "ðŁĴĥðŁı»</w>": 38624, "ðŁĴĥðŁĴĥ": 28041, "ðŁĴĦ": 46116, "ðŁĴĦ</w>": 34571, "ðŁĴħ": 27457, "ðŁĴħ</w>": 32414, "ðŁĴī": 44316, "ðŁĴī</w>": 30503, "ðŁĴĭ": 12217, "ðŁĴĭ</w>": 7417, "ðŁĴĭðŁĴĭ</w>": 29214, "ðŁĴĮ</w>": 40817, "ðŁĴį": 35850, "ðŁĴį</w>": 24898, "ðŁĴİ": 25938, "ðŁĴİ</w>": 15874, "ðŁĴIJ": 27375, "ðŁĴIJ</w>": 20554, "ðŁĴij</w>": 49404, "ðŁĴĵ": 
20628, "ðŁĴĵ</w>": 12568, "ðŁĴĵðŁĴĵ</w>": 43505, "ðŁĴĶ": 18880, "ðŁĴĶ</w>": 10704, "ðŁĴĶðŁĴĶ": 44673, "ðŁĴķ": 5412, "ðŁĴķ</w>": 3082, "ðŁĴķðŁĴķ": 23106, "ðŁĴķðŁĴķ</w>": 14117, "ðŁĴķðŁĴķðŁĴķ</w>": 26772, "ðŁĴĸ": 8466, "ðŁĴĸ</w>": 5582, "ðŁĴĸðŁĴĸ": 19562, "ðŁĴĸðŁĴĸ</w>": 30595, "ðŁĴĸðŁĴĸðŁĴĸ</w>": 33915, "ðŁĴĹ": 10148, "ðŁĴĹ</w>": 6690, "ðŁĴĹðŁĴĹ": 47158, "ðŁĴĹðŁĴĹ</w>": 24064, "ðŁĴĹðŁĴĹðŁĴĹ</w>": 36990, "ðŁĴĺ": 18223, "ðŁĴĺ</w>": 10816, "ðŁĴĺðŁĴĺ</w>": 40464, "ðŁĴĻ": 5305, "ðŁĴĻ</w>": 4074, "ðŁĴĻðŁĴĻ": 17833, "ðŁĴĻðŁĴĻ</w>": 27101, "ðŁĴĻðŁĴĻðŁĴĻ</w>": 30698, "ðŁĴĻðŁĴĽ": 46804, "ðŁĴĻðŁĴĽ</w>": 26230, "ðŁĴĻðŁĴľ": 47931, "ðŁĴĻðŁĴľ</w>": 42541, "ðŁĴļ": 8102, "ðŁĴļ</w>": 6521, "ðŁĴļðŁĴļ": 27497, "ðŁĴļðŁĴļ</w>": 46209, "ðŁĴļðŁĴļðŁĴļ</w>": 46182, "ðŁĴļðŁĴĽ</w>": 41232, "ðŁĴĽ": 8221, "ðŁĴĽ</w>": 6233, "ðŁĴĽðŁĴĻ</w>": 36337, "ðŁĴĽðŁĴļ": 37994, "ðŁĴĽðŁĴĽ": 32420, "ðŁĴľ": 6832, "ðŁĴľ</w>": 4882, "ðŁĴľðŁĴľ": 17280, "ðŁĴľðŁĴľ</w>": 28211, "ðŁĴľðŁĴľðŁĴľ</w>": 31004, "ðŁĴĿ": 36761, "ðŁĴĿ</w>": 22002, "ðŁĴŀ": 14862, "ðŁĴŀ</w>": 8988, "ðŁĴŀðŁĴŀ</w>": 36448, "ðŁĴŁ": 49394, "ðŁĴŁ</w>": 28828, "ðŁĴŃ</w>": 33848, "ðŁĵ": 1497, "ðŁĵ¢": 46560, "ðŁĵ¢</w>": 20901, "ðŁĵ£": 48841, "ðŁĵ£</w>": 21282, "ðŁĵ°:</w>": 28952, "ðŁĵ°</w>": 14985, "ðŁĵ±": 36104, "ðŁĵ±</w>": 20824, "ðŁĵ²</w>": 19363, "ðŁĵ·": 6966, "ðŁĵ·:</w>": 8294, "ðŁĵ·</w>": 5551, "ðŁĵ·@</w>": 40032, "ðŁĵ¸": 8401, "ðŁĵ¸:</w>": 10379, "ðŁĵ¸</w>": 6074, "ðŁĵ¸@</w>": 39660, "ðŁĵ¹</w>": 49251, "ðŁĵº": 21792, "ðŁĵº:</w>": 29728, "ðŁĵº</w>": 10450, "ðŁĵ»": 32711, "ðŁĵ»</w>": 15882, "ðŁĵ½</w>": 45361, "ðŁĵħ</w>": 21277, "ðŁĵĨ</w>": 23471, "ðŁĵĪ</w>": 23359, "ðŁĵĬ</w>": 22244, "ðŁĵĭ</w>": 46351, "ðŁĵĮ</w>": 22289, "ðŁĵį": 25043, "ðŁĵį:</w>": 36845, "ðŁĵį</w>": 8903, "ðŁĵĸ": 49003, "ðŁĵĸ</w>": 23043, "ðŁĵļ": 25433, "ðŁĵļ</w>": 15566, "ðŁĵĿ": 31888, "ðŁĵĿ:</w>": 48398, "ðŁĵĿ</w>": 15853, "ðŁĵŀ</w>": 24022, "ðŁĶ": 1428, "ðŁĶ¥": 3191, "ðŁĶ¥ "ðŁĶ¥</w>": 3016, "ðŁĶ¥ðŁĶ¥": 5692, "ðŁĶ¥ðŁĶ¥</w>": 11771, "ðŁĶ¥ðŁĶ¥ðŁĶ¥</w>": 11004, "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 23408, "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥</w>": 30989, "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥</w>": 48401, "ðŁĶ¥ðŁĶĹ</w>": 35130, "ðŁĶª": 47078, "ðŁĶª</w>": 34545, "ðŁĶ«": 38116, "ðŁĶ«</w>": 20583, "ðŁĶ¬</w>": 44227, "ðŁĶ®</w>": 38077, "ðŁĶ´": 12408, "ðŁĶ´</w>": 10854, "ðŁĶ´âļªï¸ı": 46879, "ðŁĶ´âļªï¸ı</w>": 40055, "ðŁĶµ": 17531, "ðŁĶµ</w>": 17193, "ðŁĶµâļªï¸ı</w>": 42412, "ðŁĶ¶": 42880, "ðŁĶ¶</w>": 36222, "ðŁĶ·</w>": 37740, "ðŁĶ¸</w>": 24200, "ðŁĶ¹</w>": 19995, "ðŁĶº</w>": 45561, "ðŁĶģ</w>": 41299, "ðŁĶĬ": 32580, "ðŁĶĬ</w>": 20502, "ðŁĶİ</w>": 44935, "ðŁĶij</w>": 35127, "ðŁĶĴ</w>": 44972, "ðŁĶĶ</w>": 45753, "ðŁĶĹ": 47475, "ðŁĶĹ</w>": 14561, "ðŁĶĺ</w>": 38995, "ðŁĶľ</w>": 36011, "ðŁĶĿ": 44387, "ðŁĶĿ</w>": 29506, "ðŁķ": 7692, "ðŁķº": 33958, "ðŁķĬ": 42624, "ðŁķĬ</w>": 37760, "ðŁĸ": 6269, "ðŁĸ¤": 17603, "ðŁĸ¤</w>": 10860, "ðŁĸ¥</w>": 47990, "ðŁĹ": 7045, "ðŁĹ£": 33232, "ðŁĹ£</w>": 18583, "ðŁĹ£ï¸ı</w>": 37476, "ðŁĹĵ": 34335, "ðŁĹĵ</w>": 28773, "ðŁĹĵï¸ı</w>": 39847, "ðŁĺ": 668, "ðŁĺ¡": 21968, "ðŁĺ¡</w>": 17452, "ðŁĺ¡ðŁĺ¡": 37223, "ðŁĺ¢": 14308, "ðŁĺ¢</w>": 9925, "ðŁĺ¢ðŁĺ¢": 32923, "ðŁĺ¢ðŁĺ¢</w>": 47921, "ðŁĺ£</w>": 32718, "ðŁĺ¤": 26872, "ðŁĺ¤</w>": 20740, "ðŁĺ¥": 38383, "ðŁĺ¥</w>": 23951, "ðŁĺ¨</w>": 38080, "ðŁĺ©": 9051, "ðŁĺ©</w>": 9494, "ðŁĺ©ðŁĺ©": 22820, "ðŁĺ©ðŁĺ©</w>": 38031, "ðŁĺ©ðŁĺ©ðŁĺ©</w>": 49063, "ðŁĺª": 38181, "ðŁĺª</w>": 22243, "ðŁĺ«": 25141, "ðŁĺ«</w>": 22340, "ðŁĺ¬": 23704, "ðŁĺ¬</w>": 14549, "ðŁĺ®": 40163, "ðŁĺ®</w>": 21616, "ðŁĺ¯</w>": 37858, "ðŁĺ°</w>": 34728, "ðŁĺ±": 10938, "ðŁĺ±</w>": 9055, "ðŁĺ±ðŁĺ±": 22061, "ðŁĺ±ðŁĺ±</w>": 40767, "ðŁĺ±ðŁĺ±ðŁĺ±</w>": 
40909, "ðŁĺ²": 40460, "ðŁĺ²</w>": 24620, "ðŁĺ³": 12047, "ðŁĺ³</w>": 8223, "ðŁĺ³ðŁĺ³": 32592, "ðŁĺ´": 23527, "ðŁĺ´</w>": 16415, "ðŁĺ´ðŁĺ´</w>": 49307, "ðŁĺµ</w>": 39368, "ðŁĺ¶</w>": 35207, "ðŁĺ·": 37943, "ðŁĺ·</w>": 25759, "ðŁĺ¸</w>": 36912, "ðŁĺ¹": 26477, "ðŁĺ¹</w>": 26573, "ðŁĺ¹ðŁĺ¹": 46287, "ðŁĺº</w>": 40613, "ðŁĺ»": 15453, "ðŁĺ»</w>": 12911, "ðŁĺ»ðŁĺ»": 34414, "ðŁĺ¼</w>": 44245, "ðŁĺ½</w>": 45156, "ðŁĺĢ": 12832, "ðŁĺĢ</w>": 7334, "ðŁĺĢðŁĺĢ</w>": 34503, "ðŁĺģ": 6967, "ðŁĺģ</w>": 4821, "ðŁĺģðŁĺģ": 37900, "ðŁĺģðŁĺģ</w>": 19213, "ðŁĺģðŁĺģðŁĺģ</w>": 29083, "ðŁĺĤ": 1424, "ðŁĺĤ)</w>": 42643, "ðŁĺĤ.</w>": 42550, "ðŁĺĤ</w>": 1558, "ðŁĺĤâĿ¤ï¸ı</w>": 36412, "ðŁĺĤðŁijĮ</w>": 42000, "ðŁĺĤðŁĺĤ": 2286, "ðŁĺĤðŁĺĤ</w>": 4112, "ðŁĺĤðŁĺĤðŁĺĤ": 22233, "ðŁĺĤðŁĺĤðŁĺĤ</w>": 4887, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 9936, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ</w>": 11522, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ</w>": 19295, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ</w>": 33415, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ</w>": 48973, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 28504, "ðŁĺĤðŁĺį</w>": 43128, "ðŁĺĤðŁĺŃ": 28965, "ðŁĺĤðŁĺŃ</w>": 25802, "ðŁĺĥ": 14079, "ðŁĺĥ</w>": 8520, "ðŁĺĥðŁĺĥ</w>": 38358, "ðŁĺĦ": 12141, "ðŁĺĦ</w>": 7624, "ðŁĺĦðŁĺĦ</w>": 32312, "ðŁĺħ": 15245, "ðŁĺħ</w>": 9188, "ðŁĺħðŁĺħ</w>": 39078, "ðŁĺĨ": 16541, "ðŁĺĨ</w>": 10943, "ðŁĺĨðŁĺĨ</w>": 39503, "ðŁĺĩ": 21694, "ðŁĺĩ</w>": 13091, "ðŁĺĪ": 14377, "ðŁĺĪ</w>": 9756, "ðŁĺĪðŁĺĪ</w>": 44473, "ðŁĺī": 9740, "ðŁĺī</w>": 4955, "ðŁĺīðŁĺī</w>": 40430, "ðŁĺĬ": 4692, "ðŁĺĬ</w>": 3020, "ðŁĺĬâĿ¤ï¸ı</w>": 43606, "ðŁĺĬðŁĺĬ": 12838, "ðŁĺĬðŁĺĬ</w>": 20842, "ðŁĺĬðŁĺĬðŁĺĬ</w>": 28685, "ðŁĺĬðŁĺĬðŁĺĬðŁĺĬ": 35519, "ðŁĺĭ": 12391, "ðŁĺĭ</w>": 7203, "ðŁĺĭðŁĺĭ</w>": 33304, "ðŁĺĮ": 19221, "ðŁĺĮ</w>": 12163, "ðŁĺį": 1796, "ðŁĺį "ðŁĺį.</w>": 48579, "ðŁĺį</w>": 1754, "ðŁĺįâĿ¤</w>": 29122, "ðŁĺįâĿ¤ï¸ı</w>": 21945, "ðŁĺįðŁijĮ</w>": 41005, "ðŁĺįðŁĴķ</w>": 35946, "ðŁĺįðŁĶ¥</w>": 46648, "ðŁĺįðŁĺĤ</w>": 48715, "ðŁĺįðŁĺį": 3663, "ðŁĺįðŁĺį</w>": 6471, "ðŁĺįðŁĺįðŁĺį": 30614, "ðŁĺįðŁĺįðŁĺį</w>": 7703, "ðŁĺįðŁĺįðŁĺįðŁĺį": 16603, "ðŁĺįðŁĺįðŁĺįðŁĺį</w>": 18925, "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį</w>": 32078, "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 48683, "ðŁĺįðŁĺĺ": 29646, "ðŁĺįðŁĺĺ</w>": 19849, "ðŁĺįðŁĺŃ</w>": 39555, "ðŁĺİ": 7426, "ðŁĺİ</w>": 4345, "ðŁĺİðŁĺİ</w>": 24048, "ðŁĺİðŁĺİðŁĺİ</w>": 39742, "ðŁĺı": 11624, "ðŁĺı</w>": 6909, "ðŁĺıðŁĺı</w>": 38151, "ðŁĺIJ": 38586, "ðŁĺIJ</w>": 19618, "ðŁĺij": 32469, "ðŁĺij</w>": 18937, "ðŁĺĴ": 20792, "ðŁĺĴ</w>": 11702, "ðŁĺĵ</w>": 28733, "ðŁĺĶ": 19532, "ðŁĺĶ</w>": 11432, "ðŁĺķ": 45741, "ðŁĺķ</w>": 20602, "ðŁĺĸ</w>": 35006, "ðŁĺĺ": 4240, "ðŁĺĺ</w>": 3352, "ðŁĺĺâĿ¤</w>": 48409, "ðŁĺĺâĿ¤ï¸ı</w>": 39150, "ðŁĺĺðŁĺį</w>": 38176, "ðŁĺĺðŁĺĺ": 15663, "ðŁĺĺðŁĺĺ</w>": 10507, "ðŁĺĺðŁĺĺðŁĺĺ</w>": 20208, "ðŁĺĺðŁĺĺðŁĺĺðŁĺĺ</w>": 44892, "ðŁĺĻ": 36201, "ðŁĺĻ</w>": 29209, "ðŁĺļ": 24897, "ðŁĺļ</w>": 19102, "ðŁĺĽ": 24550, "ðŁĺĽ</w>": 15745, "ðŁĺľ": 13226, "ðŁĺľ</w>": 7830, "ðŁĺľðŁĺľ</w>": 43065, "ðŁĺĿ": 20064, "ðŁĺĿ</w>": 12970, "ðŁĺŀ": 40458, "ðŁĺŀ</w>": 21103, "ðŁĺŁ</w>": 46947, "ðŁĺł</w>": 34094, "ðŁĺŃ": 2962, "ðŁĺŃ</w>": 3915, "ðŁĺŃâĿ¤ï¸ı</w>": 29567, "ðŁĺŃðŁĴķ</w>": 46306, "ðŁĺŃðŁĺĤ</w>": 38505, "ðŁĺŃðŁĺį</w>": 36893, "ðŁĺŃðŁĺŃ": 5300, "ðŁĺŃðŁĺŃ</w>": 11834, "ðŁĺŃðŁĺŃðŁĺŃ": 44089, "ðŁĺŃðŁĺŃðŁĺŃ</w>": 13116, "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 19793, "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ</w>": 27322, "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃðŁĺŃ</w>": 43366, "ðŁĻ": 1478, "ðŁĻĢ</w>": 43092, "ðŁĻĤ": 32006, "ðŁĻĤ</w>": 14860, "ðŁĻĥ": 27222, "ðŁĻĥ</w>": 15652, "ðŁĻĦ": 20648, "ðŁĻĦ</w>": 13049, "ðŁĻħ": 42702, "ðŁĻĨ": 30050, "ðŁĻĨ</w>": 35730, "ðŁĻĪ": 12661, "ðŁĻĪ</w>": 9516, "ðŁĻĪðŁĻĪ</w>": 41796, "ðŁĻĬ": 23684, "ðŁĻĬ</w>": 16636, 
"ðŁĻĭ": 19193, "ðŁĻĭ</w>": 30274, "ðŁĻĮ": 4366, "ðŁĻĮ</w>": 4855, "ðŁĻĮðŁı»": 26756, "ðŁĻĮðŁı»</w>": 15799, "ðŁĻĮðŁı¼": 26584, "ðŁĻĮðŁı¼</w>": 15364, "ðŁĻĮðŁı½": 36660, "ðŁĻĮðŁı½</w>": 22962, "ðŁĻĮðŁı¾": 38023, "ðŁĻĮðŁı¾</w>": 26466, "ðŁĻĮðŁĻĮ": 21202, "ðŁĻĮðŁĻĮ</w>": 30430, "ðŁĻĮðŁĻĮðŁĻĮ</w>": 37127, "ðŁĻı": 4260, "ðŁĻı</w>": 5503, "ðŁĻıðŁı»": 25100, "ðŁĻıðŁı»</w>": 16650, "ðŁĻıðŁı¼": 31163, "ðŁĻıðŁı¼</w>": 18952, "ðŁĻıðŁı½": 34103, "ðŁĻıðŁı½</w>": 21540, "ðŁĻıðŁı¾": 34277, "ðŁĻıðŁı¾</w>": 21979, "ðŁĻıðŁĻı": 18227, "ðŁĻıðŁĻı</w>": 26510, "ðŁĻıðŁĻıðŁĻı</w>": 31702, "ðŁļ": 2730, "ðŁļ¨": 12198, "ðŁļ¨</w>": 6056, "ðŁļ¨ðŁļ¨": 36487, "ðŁļ¨ðŁļ¨</w>": 21440, "ðŁļ¨ðŁļ¨ðŁļ¨</w>": 41515, "ðŁļ©</w>": 44514, "ðŁļ«</w>": 35291, "ðŁļ²</w>": 37085, "ðŁļ´": 30825, "ðŁļ¶": 46060, "ðŁļĢ": 22400, "ðŁļĢ</w>": 13542, "ðŁļĢðŁļĢ": 49033, "ðŁļĤ</w>": 38949, "ðŁļĮ</w>": 46891, "ðŁļĹ": 33054, "ðŁļĹ</w>": 22783, "ðŁļĺ</w>": 35825, "ðŁļĻ</w>": 48487, "ðŁĽ": 11306, "ñ": 173, "ñ</w>": 429, "ò": 174, "ò</w>": 430, "ó": 175, "ó</w>": 431, "ô": 176, "ô</w>": 432, "õ": 177, "õ</w>": 433, "ö": 178, "ö</w>": 434, "÷": 179, "÷</w>": 435, "ø": 180, "ø</w>": 436, "ù": 181, "ù</w>": 437, "ú": 182, "ú</w>": 438, "û": 183, "û</w>": 439, "ü": 184, "ü</w>": 440, "ý": 185, "ý</w>": 441, "þ": 186, "þ</w>": 442, "ÿ": 187, "ÿ</w>": 443, "Ā": 188, "Ā</w>": 444, "ā": 189, "ā</w>": 445, "Ă": 190, "Ă</w>": 446, "ă": 191, "ă</w>": 447, "Ą": 192, "Ą</w>": 448, "ą": 193, "ą</w>": 449, "Ć": 194, "Ć</w>": 450, "ć": 195, "ć</w>": 451, "Ĉ": 196, "Ĉ</w>": 452, "ĉ": 197, "ĉ</w>": 453, "Ċ": 198, "Ċ</w>": 454, "ċ": 199, "ċ</w>": 455, "Č": 200, "Č</w>": 456, "č": 201, "č</w>": 457, "Ď": 202, "Ď</w>": 458, "ď": 203, "ď</w>": 459, "Đ": 204, "Đ</w>": 460, "đ": 205, "đ</w>": 461, "Ē": 206, "Ē</w>": 462, "ē": 207, "ē</w>": 463, "Ĕ": 208, "Ĕ</w>": 464, "ĕ": 209, "ĕ</w>": 465, "Ė": 210, "Ė</w>": 466, "ė": 211, "ė</w>": 467, "Ę": 212, "Ę</w>": 468, "ę": 213, "ę</w>": 469, "Ě": 214, "Ě</w>": 470, "ě": 215, "ě</w>": 471, "Ĝ": 216, "Ĝ</w>": 472, "ĝ": 217, "ĝ</w>": 473, "Ğ": 218, "Ğ</w>": 474, "ğ": 219, "ğ</w>": 475, "Ġ": 220, "Ġ</w>": 476, "ġ": 221, "ġ</w>": 477, "Ģ": 222, "Ģ</w>": 478, "Ģï¸ı": 9668, "Ģï¸ı</w>": 5511, "ģ": 223, "ģ</w>": 479, "ģà¸": 15016, "Ĥ": 224, "Ĥ</w>": 480, "Ĥâĸ": 29036, "ĤâĸĤâĸ": 30832, "ĥ": 225, "ĥ</w>": 481, "Ħ": 226, "Ħ</w>": 482, "Ħà¸": 20537, "Ħë": 34462, "Ħëĭ": 25170, "ħ": 227, "ħ</w>": 483, "ħï¸ı</w>": 33950, "Ĩ": 228, "Ĩ</w>": 484, "ĩ": 229, "ĩ</w>": 485, "Ī": 230, "Ī</w>": 486, "ī": 231, "ī</w>": 487, "īï¸ı</w>": 37463, "Ĭ": 232, "Ĭ</w>": 488, "Ĭãģ": 30294, "ĭ": 233, "ĭ</w>": 489, "ĭãģ": 36218, "ĭãĤ": 45737, "Į": 234, "Į</w>": 490, "ĮãĤĬãģ": 45969, "ĮãĤĬãģŁãģĦ</w>": 47021, "Įë": 17003, "į": 235, "į</w>": 491, "İ": 236, "İ</w>": 492, "ı": 237, "ı</w>": 493, "IJ": 238, "IJ</w>": 494, "ij": 239, "ij</w>": 495, "Ĵ": 240, "Ĵ</w>": 496, "ĵ": 241, "ĵ</w>": 497, "Ķ": 242, "Ķ</w>": 498, "Ķë": 37978, "Ķï¸ı": 24395, "Ķï¸ı</w>": 7443, "ķ": 243, "ķ</w>": 499, "ķãĤ": 26609, "ķï¸ı</w>": 44853, "ĸ": 244, "ĸ</w>": 500, "ĸï¸ı</w>": 28877, "Ĺ": 245, "Ĺ</w>": 501, "ĺ": 246, "ĺ</w>": 502, "Ļ": 247, "Ļ</w>": 503, "ļ": 248, "ļ</w>": 504, "Ľ": 249, "Ľ</w>": 505, "ľ": 250, "ľ</w>": 506, "ľë": 39810, "Ŀ": 251, "Ŀ</w>": 507, "ŀ": 252, "ŀ</w>": 508, "Ł": 253, "Ł</w>": 509, "ŁãģĦ</w>": 46023, "ł": 254, "ł</w>": 510, "łï¸ı": 27899, "łï¸ı</w>": 12715, "łĪ": 43364, "Ń": 255, "Ń</w>": 511 }
import torch
import torch.nn as nn
from collections import OrderedDict


def conv_nd(dims, *args, **kwargs):
    """
    Create a 1D, 2D, or 3D convolution module.
    """
    if dims == 1:
        return nn.Conv1d(*args, **kwargs)
    elif dims == 2:
        return nn.Conv2d(*args, **kwargs)
    elif dims == 3:
        return nn.Conv3d(*args, **kwargs)
    raise ValueError(f"unsupported dimensions: {dims}")


def avg_pool_nd(dims, *args, **kwargs):
    """
    Create a 1D, 2D, or 3D average pooling module.
    """
    if dims == 1:
        return nn.AvgPool1d(*args, **kwargs)
    elif dims == 2:
        return nn.AvgPool2d(*args, **kwargs)
    elif dims == 3:
        return nn.AvgPool3d(*args, **kwargs)
    raise ValueError(f"unsupported dimensions: {dims}")


class Downsample(nn.Module):
    """
    A downsampling layer with an optional convolution.
    :param channels: channels in the inputs and outputs.
    :param use_conv: a bool determining if a convolution is applied.
    :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
                 downsampling occurs in the inner-two dimensions.
    """

    def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1):
        super().__init__()
        self.channels = channels
        self.out_channels = out_channels or channels
        self.use_conv = use_conv
        self.dims = dims
        stride = 2 if dims != 3 else (1, 2, 2)
        if use_conv:
            self.op = conv_nd(
                dims, self.channels, self.out_channels, 3, stride=stride, padding=padding
            )
        else:
            assert self.channels == self.out_channels
            self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride)

    def forward(self, x):
        assert x.shape[1] == self.channels
        if not self.use_conv:
            # pad odd spatial sizes so average pooling does not drop a row/column
            padding = [x.shape[2] % 2, x.shape[3] % 2]
            self.op.padding = padding
        x = self.op(x)
        return x


class ResnetBlock(nn.Module):
    def __init__(self, in_c, out_c, down, ksize=3, sk=False, use_conv=True):
        super().__init__()
        ps = ksize // 2
        if in_c != out_c or sk == False:
            self.in_conv = nn.Conv2d(in_c, out_c, ksize, 1, ps)
        else:
            self.in_conv = None
        self.block1 = nn.Conv2d(out_c, out_c, 3, 1, 1)
        self.act = nn.ReLU()
        self.block2 = nn.Conv2d(out_c, out_c, ksize, 1, ps)
        if sk == False:
            self.skep = nn.Conv2d(in_c, out_c, ksize, 1, ps)
        else:
            self.skep = None

        self.down = down
        if self.down == True:
            self.down_opt = Downsample(in_c, use_conv=use_conv)

    def forward(self, x):
        if self.down == True:
            x = self.down_opt(x)
        if self.in_conv is not None:
            x = self.in_conv(x)

        h = self.block1(x)
        h = self.act(h)
        h = self.block2(h)
        if self.skep is not None:
            return h + self.skep(x)
        else:
            return h + x


class Adapter(nn.Module):
    def __init__(self, channels=[320, 640, 1280, 1280], nums_rb=3, cin=64, ksize=3, sk=False, use_conv=True, xl=True):
        super(Adapter, self).__init__()
        self.unshuffle_amount = 8
        resblock_no_downsample = []
        resblock_downsample = [3, 2, 1]
        self.xl = xl
        if self.xl:
            self.unshuffle_amount = 16
            resblock_no_downsample = [1]
            resblock_downsample = [2]

        self.input_channels = cin
        self.unshuffle = nn.PixelUnshuffle(self.unshuffle_amount)
        self.channels = channels
        self.nums_rb = nums_rb
        self.body = []
        for i in range(len(channels)):
            for j in range(nums_rb):
                if (i in resblock_downsample) and (j == 0):
                    self.body.append(
                        ResnetBlock(channels[i - 1], channels[i], down=True, ksize=ksize, sk=sk, use_conv=use_conv))
                elif (i in resblock_no_downsample) and (j == 0):
                    self.body.append(
                        ResnetBlock(channels[i - 1], channels[i], down=False, ksize=ksize, sk=sk, use_conv=use_conv))
                else:
                    self.body.append(
                        ResnetBlock(channels[i], channels[i], down=False, ksize=ksize, sk=sk, use_conv=use_conv))
        self.body = nn.ModuleList(self.body)
        self.conv_in = nn.Conv2d(cin, channels[0], 3, 1, 1)

    def forward(self, x):
        # unshuffle the pixel-space hint into channels, then extract one feature map per level
        x = self.unshuffle(x)
        features = []
        x = self.conv_in(x)
        for i in range(len(self.channels)):
            for j in range(self.nums_rb):
                idx = i * self.nums_rb + j
                x = self.body[idx](x)
            # pad the feature list with None placeholders so entries line up with the UNet blocks
            if self.xl:
                features.append(None)
                if i == 0:
                    features.append(None)
                    features.append(None)
                if i == 2:
                    features.append(None)
            else:
                features.append(None)
                features.append(None)
            features.append(x)

        return features


class LayerNorm(nn.LayerNorm):
    """Subclass torch's LayerNorm to handle fp16."""

    def forward(self, x: torch.Tensor):
        orig_type = x.dtype
        ret = super().forward(x.type(torch.float32))
        return ret.type(orig_type)


class QuickGELU(nn.Module):
    def forward(self, x: torch.Tensor):
        return x * torch.sigmoid(1.702 * x)


class ResidualAttentionBlock(nn.Module):
    def __init__(self, d_model: int, n_head: int, attn_mask: torch.Tensor = None):
        super().__init__()

        self.attn = nn.MultiheadAttention(d_model, n_head)
        self.ln_1 = LayerNorm(d_model)
        self.mlp = nn.Sequential(
            OrderedDict([("c_fc", nn.Linear(d_model, d_model * 4)),
                         ("gelu", QuickGELU()),
                         ("c_proj", nn.Linear(d_model * 4, d_model))]))
        self.ln_2 = LayerNorm(d_model)
        self.attn_mask = attn_mask

    def attention(self, x: torch.Tensor):
        self.attn_mask = self.attn_mask.to(dtype=x.dtype, device=x.device) if self.attn_mask is not None else None
        return self.attn(x, x, x, need_weights=False, attn_mask=self.attn_mask)[0]

    def forward(self, x: torch.Tensor):
        x = x + self.attention(self.ln_1(x))
        x = x + self.mlp(self.ln_2(x))
        return x


class StyleAdapter(nn.Module):
    def __init__(self, width=1024, context_dim=768, num_head=8, n_layes=3, num_token=4):
        super().__init__()

        scale = width ** -0.5
        self.transformer_layes = nn.Sequential(*[ResidualAttentionBlock(width, num_head) for _ in range(n_layes)])
        self.num_token = num_token
        self.style_embedding = nn.Parameter(torch.randn(1, num_token, width) * scale)
        self.ln_post = LayerNorm(width)
        self.ln_pre = LayerNorm(width)
        self.proj = nn.Parameter(scale * torch.randn(width, context_dim))

    def forward(self, x):
        # append the learned style tokens, run the transformer, project the style tokens only
        style_embedding = self.style_embedding + torch.zeros(
            (x.shape[0], self.num_token, self.style_embedding.shape[-1]), device=x.device)
        x = torch.cat([x, style_embedding], dim=1)
        x = self.ln_pre(x)
        x = x.permute(1, 0, 2)  # NLD -> LND
        x = self.transformer_layes(x)
        x = x.permute(1, 0, 2)  # LND -> NLD

        x = self.ln_post(x[:, -self.num_token:, :])
        x = x @ self.proj

        return x


class ResnetBlock_light(nn.Module):
    def __init__(self, in_c):
        super().__init__()
        self.block1 = nn.Conv2d(in_c, in_c, 3, 1, 1)
        self.act = nn.ReLU()
        self.block2 = nn.Conv2d(in_c, in_c, 3, 1, 1)

    def forward(self, x):
        h = self.block1(x)
        h = self.act(h)
        h = self.block2(h)
        return h + x


class extractor(nn.Module):
    def __init__(self, in_c, inter_c, out_c, nums_rb, down=False):
        super().__init__()
        self.in_conv = nn.Conv2d(in_c, inter_c, 1, 1, 0)
        self.body = []
        for _ in range(nums_rb):
            self.body.append(ResnetBlock_light(inter_c))
        self.body = nn.Sequential(*self.body)
        self.out_conv = nn.Conv2d(inter_c, out_c, 1, 1, 0)
        self.down = down
        if self.down == True:
            self.down_opt = Downsample(in_c, use_conv=False)

    def forward(self, x):
        if self.down == True:
            x = self.down_opt(x)
        x = self.in_conv(x)
        x = self.body(x)
        x = self.out_conv(x)
        return x


class Adapter_light(nn.Module):
    def __init__(self, channels=[320, 640, 1280, 1280], nums_rb=3, cin=64):
        super(Adapter_light, self).__init__()
        self.unshuffle_amount = 8
        self.unshuffle = nn.PixelUnshuffle(self.unshuffle_amount)
        self.input_channels = cin
        self.channels = channels
        self.nums_rb = nums_rb
        self.body = []
        self.xl = False

        for i in range(len(channels)):
            if i == 0:
                self.body.append(extractor(in_c=cin, inter_c=channels[i] // 4, out_c=channels[i], nums_rb=nums_rb, down=False))
            else:
                self.body.append(extractor(in_c=channels[i - 1], inter_c=channels[i] // 4, out_c=channels[i], nums_rb=nums_rb, down=True))
        self.body = nn.ModuleList(self.body)

    def forward(self, x):
        # unshuffle the pixel-space hint, then extract one feature map per level
        x = self.unshuffle(x)
        features = []
        for i in range(len(self.channels)):
            x = self.body[i](x)
            features.append(None)
            features.append(None)
            features.append(x)

        return features
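# A minimal usage sketch for the adapters above (the constructor arguments and input
# shape here are illustrative assumptions, not taken from the file): Adapter expects a
# pixel-space hint whose spatial size is divisible by unshuffle_amount, and returns one
# feature map per level interleaved with None placeholders.
if __name__ == "__main__":
    hint = torch.randn(1, 3, 512, 512)  # e.g. a depth or sketch conditioning image
    # cin must match the channel count after PixelUnshuffle: 3 image channels * 8 * 8 = 192
    adapter = Adapter(channels=[320, 640, 1280, 1280], nums_rb=2, cin=192,
                      ksize=1, sk=True, use_conv=False, xl=False)
    feats = adapter(hint)
    print([None if f is None else tuple(f.shape) for f in feats])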
{ "additional_special_tokens": [ "<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>" ], "eos_token": { "content": "</s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false }, "pad_token": { "content": "<pad>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false }, "unk_token": { "content": "<unk>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false } }
{ "version": "1.0", "truncation": null, "padding": null, "added_tokens": [ { "id": 0, "content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 1, "content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 2, "content": "<unk>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32000, "content": "<extra_id_99>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32001, "content": "<extra_id_98>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32002, "content": "<extra_id_97>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32003, "content": "<extra_id_96>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32004, "content": "<extra_id_95>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32005, "content": "<extra_id_94>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32006, "content": "<extra_id_93>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32007, "content": "<extra_id_92>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32008, "content": "<extra_id_91>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32009, "content": "<extra_id_90>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32010, "content": "<extra_id_89>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32011, "content": "<extra_id_88>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32012, "content": "<extra_id_87>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32013, "content": "<extra_id_86>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32014, "content": "<extra_id_85>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32015, "content": "<extra_id_84>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32016, "content": "<extra_id_83>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32017, "content": "<extra_id_82>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32018, "content": "<extra_id_81>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32019, "content": "<extra_id_80>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32020, "content": "<extra_id_79>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32021, "content": "<extra_id_78>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32022, "content": 
"<extra_id_77>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32023, "content": "<extra_id_76>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32024, "content": "<extra_id_75>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32025, "content": "<extra_id_74>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32026, "content": "<extra_id_73>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32027, "content": "<extra_id_72>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32028, "content": "<extra_id_71>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32029, "content": "<extra_id_70>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32030, "content": "<extra_id_69>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32031, "content": "<extra_id_68>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32032, "content": "<extra_id_67>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32033, "content": "<extra_id_66>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32034, "content": "<extra_id_65>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32035, "content": "<extra_id_64>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32036, "content": "<extra_id_63>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32037, "content": "<extra_id_62>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32038, "content": "<extra_id_61>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32039, "content": "<extra_id_60>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32040, "content": "<extra_id_59>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32041, "content": "<extra_id_58>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32042, "content": "<extra_id_57>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32043, "content": "<extra_id_56>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32044, "content": "<extra_id_55>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32045, "content": "<extra_id_54>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32046, "content": "<extra_id_53>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32047, "content": "<extra_id_52>", "single_word": false, "lstrip": false, "rstrip": false, 
"normalized": false, "special": true }, { "id": 32048, "content": "<extra_id_51>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32049, "content": "<extra_id_50>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32050, "content": "<extra_id_49>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32051, "content": "<extra_id_48>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32052, "content": "<extra_id_47>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32053, "content": "<extra_id_46>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32054, "content": "<extra_id_45>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32055, "content": "<extra_id_44>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32056, "content": "<extra_id_43>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32057, "content": "<extra_id_42>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32058, "content": "<extra_id_41>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32059, "content": "<extra_id_40>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32060, "content": "<extra_id_39>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32061, "content": "<extra_id_38>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32062, "content": "<extra_id_37>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32063, "content": "<extra_id_36>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32064, "content": "<extra_id_35>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32065, "content": "<extra_id_34>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32066, "content": "<extra_id_33>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32067, "content": "<extra_id_32>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32068, "content": "<extra_id_31>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32069, "content": "<extra_id_30>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32070, "content": "<extra_id_29>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32071, "content": "<extra_id_28>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32072, "content": "<extra_id_27>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32073, "content": 
"<extra_id_26>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32074, "content": "<extra_id_25>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32075, "content": "<extra_id_24>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32076, "content": "<extra_id_23>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32077, "content": "<extra_id_22>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32078, "content": "<extra_id_21>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32079, "content": "<extra_id_20>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32080, "content": "<extra_id_19>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32081, "content": "<extra_id_18>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32082, "content": "<extra_id_17>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32083, "content": "<extra_id_16>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32084, "content": "<extra_id_15>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32085, "content": "<extra_id_14>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32086, "content": "<extra_id_13>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32087, "content": "<extra_id_12>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32088, "content": "<extra_id_11>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32089, "content": "<extra_id_10>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32090, "content": "<extra_id_9>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32091, "content": "<extra_id_8>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32092, "content": "<extra_id_7>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32093, "content": "<extra_id_6>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32094, "content": "<extra_id_5>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32095, "content": "<extra_id_4>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32096, "content": "<extra_id_3>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32097, "content": "<extra_id_2>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true }, { "id": 32098, "content": "<extra_id_1>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": 
false, "special": true }, { "id": 32099, "content": "<extra_id_0>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": false, "special": true } ], "normalizer": { "type": "Sequence", "normalizers": [ { "type": "Precompiled", "precompiled_charsmap": "ALQCAACEAAAAAACAAQAAgMz8AgC4BQAAhyIAgMzkAgC4PQAAeyIAgMzsAgC4BQAAiyIAgMw8AADNvAAAmwkAgJ4JAIChCQCAgx0AAIAZAACBGQAAPR0AgDUdAIBNHQCARR0AgIAxAACBMQAApAkAgIkxAAA9WAMAPEgDAEAKAIA+aAMAAYUAAIQBAQADjQAAAokAAAWVAAAEkQAAB50AAAaZAAAJqQAACKEAAAutAAAKpQAADbkAAAy9AAAPvQAADrkAABHFAAAQwQAAE80AABLJAAAV1QAAFNEAABfdAAAW2QAAGeUAABjhAAAb7QAAGukAAB31AAAc8QAAH/0AAB75AABhOAkAZR0AgGNADgBi8AgAZSgPAGSADgBn2A8AZvAPAGlwDABoMAwAa/AMAGrYDABtSA0AbBwNAG8QEgBubA0ARgoAgHAMEwBzqBMAcuwTAHUoEAB0TBAAd9ARAHYUEAB50BYAePQQAF0dAIB69BYAdR0AgG0dAIB/fQEAhgwAgEGAAgDeCwCAQxgAAELAAABFSAAARGAAAEeQBgBGhAEASSgGAEhsAQBLOAcASvAHAE1wBwBMRAcAT/AEAE7MBACnCQCAUCwFAFOgCgBSEAUAVQAKAFRQCgBX0AgAVhALAFlICABYuAgAhBEAAFo8CACA9QAAgZ0AANgLAIAtHQCAg2kCAIJFAgCBNQIAgDUCAIdtAwCGVQMAgTkAAIRlAgAXDACAigEEAInVAwCI7QMAjwkAAKgLAIApDACAjAkAAC8MAICJMQMAkQkAAMzYAABVHQCAfR0AgL0aAIBMCgCAgGUDAIENAwCGPQAAgx0DAMwQAgDNhAEAgikAAMx0AwCjgQYAxRoAgICxAgCBsQIAzRoAgIEpAAClwQAA1RoAgMzoAwDNYAIAUgoAgKjxAABYCgCAXgoAgGQKAIDdGgCAgWkAAMzcBACCEQEA5RoAgGoKAIDtGgCA/RoAgAUbAID1GgCAswkAgMygBADN3AQAzAgBALYJAIClHQCAhhEBAOEAKwDgfCcA44hIAuIMOAKdHQCAh5EBALUdAICtHQCAgNkBAIE1AADMxAIA6kRkApUdAIANGwCA72hkAoERBwCC8QEA8NCLAolVAACB5QEAFRsAgIfhAQCAbQAAgQ0AAIN5AAB2CgCAgXkAAICVAQDMOAEAzRQBAIzBAQB8CgCAvAkAgKMVAQDDlBcAwpwUAMWEFwDEUBcAx+wXAMaAEgCNHQCAiAoAgMvQFgDK4BYAzRQWADUMAIDPvCAAzpwZANHMJADQ2CUA0+gkALFRAQA7DACAp90HAL0dAIDWvCQA2cgnANjUIgDb+CcALRsAgIftBwCCCgCAzPgEAB0bAIAlHQCAh8kGALAJAICR3QcAuQkAgCUbAIBwCgCANRsAgIUdAICMDACAjPkGAAsMAICA1QYAgcEGAMzEAgDNBAUAglEAAIN1BwCArQYAgbkGAIY1BwCHKQcAhEEAAI4KAICn7QAAPRsAgIjpBwCJzQcAlAoAgI/BBwCM3QcAmgoAgOoLAICnXQYAsJ0AAKAKAICmCgCAo0EGAEUbAIBVGwCAfQwAgE0bAIBdGwCArXEGAGUbAIC/CQCAzPgDAM0sAwDCCQCAo+UAAMUJAICMTQAAsgoAgKfxAAC4CgCAsT0GAIedAACGlQAAqB0HAISJAAC+CgCAgqkAAIHVAACtAQcAygoAgJE9AACCmQEAyAkAgM0MBQDMCAUAgT0AAIeFAQCIvQEAdRsAgMUdAICuCwCAjJEBAEEMAIBHDACAzR0AgID1AQCBhQEAgoEBAIOdAQCEiQEAxAoAgIapAQCHXQAAiG0AAIlNAABtGwCAzBACAIxdAACCDQAA0AoAgI9JAACw6QAAfRsAgPALAICjKQEAgCUBAIFVAQCFGwCApzUBAMykAQDNEAIA1goAgI0bAICBNQAA3AoAgK4JAQDoCgCAzOgBAM0oAgCVGwCAo/EAAIQFAACdGwCA4goAgK0bAICotQAApRsAgIFdAAC1GwCAzPwBAM3AAQC9GwCAxRsAgIGFAwARDACAgeUDAO4KAICH6QMAywkAgIylAwDNGwCA+goAgKoJAIDVGwCAgZkDAIHdAwCMvQMAzSQBAMwgAQDMEAIAzTACAIH5AACHUQAAgFUAAIFZAAD0CgCAg0kAAIxBAADlGwCA3RsAgM4JAICBfQAAgHEAAMwgAwDNsAMAo30DANEJAICjEQMA7R0AgIEtAQCx/QAApzEDAK1BAwDlHQCAo20DAP0dAID1HQCA7RsAgKdtAwCANQAAgR0AALFtAwCILQAAmAwAgKeVAACBcQAAgFkAAINxAACj9QAAgVEAAK2BAAD1GwCAsQkDAIldAACEPQAAzDgBAISdAQCBGQAAgAkAAIRlAAD9GwCAzNAHAMzwBwAFHACAkYkAAMxMBgDNBAYAzHAGAM10BgDMQAcAmy0PAMyoBwDNrAcAhg0AAIdVDwCEQQ8ACQsAgIIBDACDVQ8AgDUBAIHZAQCkDACAj+kAAIztAACSDACA3R0AgIv1AACIbQ8AiQ0AAA8LAIC0CwCAgiUAAE0MAICBQQAAUwwAgBUeAIANHgCAJR4AgB0eAIAtHgCABR4AgIApAACBKQAA/AsAgA0cAICEeQAAFRwAgIFNAQCAoQEAGAsAgKP9DwDMOAIAzUgDAB0cAICBWQAAzXwCAMykDQAkCwCAWQwAgKjJDwCHOQAA1wkAgImhDwADCwCAkREAAJ4MAIDaCQCAmQsAgF8MAICAuQ8AgbkPANUdAICDjQ8A9gsAgCUcAICEBQAALRwAgB4LAIA1HACAKgsAgIGdDwCHIQAAh7UPAMyoAgDN6AIAzLQMAM3cDACmzQAAp8UAAE0cAICPgQ8AjIkPAKPlAAAwCwCAPRwAgDwLAICxyQAAhwUAAFUcAIBFHACAhz0AAF0cAIBxDACANgsAgKMFDwCB+QAAzKgDAGUcAIBICwCAjEkAAKPxAABtHACAdwwAgEILAICnlQAAfRwAgHUcAIDMrAMAzcgAAN0JAICHaQAA4AkAgIG9AACCeQAA4wkAgIe5AQBOCwCAkaUAAIEdAACdHACAVAsAgIgFAAClHACAm5EAAFoLAIDmCQCAjJEBANILAIDGCwCAwAsAgMwLAICDRQAAgrkBAIG5AQCApQEAPR4AgIZxAABgCwCAhEkAAIsVAACKPQAAiTkAAIhFAACP+QAAZgsAgLoLAICMBQAAp1EBAKZJAQBlDACAsHkAAKNZAQCMqQAAgKkAAI
GpAACBlQAAgJUAAK1xAQBrDACAogsAgISNAABNHgCARR4AgKMhAABdHgCAVR4AgGUeAICBbQAAgG0AALEFAQCkOQAANR4AgIUcAIBsCwCAqAUAAJUcAICNHACArQkAAMywAQCBvQMAgL0DAIPNAwCtHACAtRwAgL0cAIDMvAEAzYQBAInpAwDMHAEAgdkCAIDFAgDNOAEAzDwBAMxoAgDNRAIAg00AAMUcAICH2QAAhy0AAIBFAACBEQAAggUAAHILAIDVHACAzRwAgN0cAIDMOAIAiBUAAIjhAACAbQAAgTkAAMyEAgDNUAEAo0UDAIQ5AQDlHACA7RwAgMzcAwDNSAIAbR4AgOkJAIB4CwCAhR4AgKoMAICBbQAA9RwAgH4LAICj0QAAfR4AgHUeAIDMiAQAgXUAAIB1AACBCwCAo7UAAMwABADNVAIA/RwAgIcLAICETQEAjQsAgAUdAIANHQCAzNAOAMwsAQDMAAUAzVwFAOwJAIDvCQCAzJgOAIHBAADMzA8AzDwOAMwIAQDNnA4AzNQPAM14DwDMPA4AzTgOAIHlAQCA5QEAg+UBAILlAQDUCQCAhOUBAIfhAQBBHQCAiaUBAIjZAQCByQcAOR0AgFEdAIBJHQCAzDQBAPUJAICA3QAAgekAAEMKAICD/QAAgM0AAIH5AACBEQcAaR0AgGEdAICJ0QAAzCgBAHkdAIBxHQCA4QsAgMw0AQDbCwCAgF0AAIFlAACjAQEAg2EAAIFxAACASQAAMR0AgBoMAICrCwCAiVUAACwMAIAyDACAWR0AgIEdAIDBGgCATwoAgIIdAACDeQcAgBkHAIEZBwCGIQAAhykAAISRBwDyCQCAimkAALHZBgCIaQAAifUHAEkKAICP3QcAjNkHAIkMAID4CQCAKR0AgPsJAICRoQcAgEEHAIFBBwCHBQAAyRoAgIKRBwDRGgCA2RoAgKOVBgCGhQcAp+0AAMyQAgDN4AUAsekAAKPBAABVCgCAWwoAgGEKAIBnCgCA/gkAgKVlBwDhGgCAzLgDAKhVBwDpGgCAbQoAgPEaAIABGwCACRsAgPkaAIABCgCAo60AAAQKAICMJQYABwoAgIxNAACpHQCAgm0AAIE9BgCCAQYAgWUAAKEdAICHZQAAuR0AgIcRBgCHrQEAsR0AgMxQAgDNxAIAgeEBAIDJAQCD4QEAkYkAAID9AQCB1QEAmR0AgIydAQCJNQAAcwoAgIB1AACBXQAAhi0AAIc1AACEfQAAERsAgIKFAQCDfQAAgJ0BAIGRAQAZGwCAj+kAAIzhAAB5CgCAfwoAgAoKAICIDQAAifkAAKc5AQCRHQCAiwoAgDgMAICjJQEAPgwAgLBZAACJHQCAggUAAMEdAICtFQEAjwwAgDEbAICGBQAAhQoAgCEbAIApGwCAp2kAAIANAQCBAQEAhzEAAKNJAACxGQEAzBACADkbAIAODACAkQoAgK1RAADM1AEAzfgBAKhBAABBGwCAzTgBAMw8AQCB7QMAlwoAgJ0KAICMDQAA7QsAgKMKAICBxQMAzGgCAKkKAICCxQMASRsAgITJAwCHKQAAhjEAAFkbAICCbQAAgAwAgFEbAICHYQAAYRsAgGkbAIAVHQCAzKgDAM2sAgCB+QAAiC0AAA0KAIAQCgCAEwoAgIw1AAC1CgCAuwoAgLHVAADBCgCAeRsAgMkdAICxCwCAzDABAEQMAIBKDACA0R0AgMwEAQDHCgCAcRsAgKelAADTCgCAo40AAMwUAgCAuQAAgbkAAKeFAAAIDACAgmUAAIEbAICMNQAA8wsAgMzsHADN/AMAiRsAgK6tAADZCgCAkRsAgMzABgDN0AYAsL0BAMyQBwDfCgCAgckBAMwYHQDNIAIAhBEAAOsKAIDNuAYAzKwGAKEbAIDlCgCAgSkAALEbAICpGwCAo+0BAMxAHQDNEAIAuRsAgMEbAICBCQAAyRsAgMxAHQDN0AIAqNkBABQMAIDMkAcAzBwBAMxgBgDNZAYA8QoAgBwKAIDRGwCAkSkBAP0KAICBzR8A2RsAgPcKAIDpGwCA4RsAgMzEBgDNwAYAgTEAAIDZAAAfCgCAIgoAgIK5AQCDRQEAgLkBAIG5AQCGXQEA8R0AgIRdAQDpHQCAzcAAAMzwAACIARwAiXkBAAEeAICPVQEAjGEBAPkdAICB3R4AgRUfAJkbAICBXR8AjIEfAIdBHwDMGAMAzWgDAIBNHwCBpR8AJQoAgIOpHwCMFR8AjNEeACgKAICHtR8AgJUfAIGZHwCBEQAAg70fAICFHwCBiR8A8RsAgIQ9AACbDACAiZkfAPkbAICIBQAABgsAgAEcAICADQAAgf0AAAkcAICj2R8Ao3keAKOFAAAMCwCArTUfAKdhHgCnqR8AoQwAgIQNAACnDACAozUfACsKAICtiR8AhHEAAKchHwCxPR4AsYUfAJUMAIDhHQCAEgsAgLcLAIDMtBwAzbAcAFAMAICxQR8AVgwAgJwLAIAZHgCAER4AgCkeAIAhHgCAgLkeAIG5HgCCIQEAgzUBAIRhAQAxHgCAhokBAIe9AQCIkQEAiekBANkdAICL/QEAjOUBAIINAAAJHgCAj90BAIO5AQCRrQEAgb0BAIC9AQCAoQEAgaEBAPkLAID/CwCAhD0AABEcAICJlQEAm4EBAIHNHgCAzR4AzPwCAM3wAgCB5QAAGRwAgIHtAACjpQAAzJABAM1cAgCHHQAAGwsAgKj5AAAhHACAJwsAgFwMAIBiDACAKRwAgIQFAAAxHACAo9UAACELAIA5HACAgVEAAMz0AQDN0AEALQsAgIc9AABRHACAMwsAgEEcAIA/CwCAhwUAAFkcAIBJHACAh/EDAIHZAwCBmQMAgZEAAGEcAIB0DACAjPkDAMwkAQCHuQMAgfkDADkLAIDMZAIAgskDAIyZAwBpHACAh9EDAI+RAwCB3QYAkfUDAMwABADN7AMAh2UAABkdAIBLCwCAcRwAgHoMAIBFCwCAzBgBAIg5AACBHACAeRwAgMxcAwCMJQAALgoAgMwsAQCx/QAAozkDADEKAIA0CgCAoRwAgKdZAwDMdAMAiAkAAKNRAwCpHACAXQsAgINtDQCnnQAApq0AAKOdAACxDQMAzCgBANULAICntQAAprUAAMkLAIDMMAEAgdUHAMMLAIDMKAEAzwsAgEEeAIBjCwCArYkAAGkLAICAzQEAgd0BAMxEAQDNnB4AhPUBAL0LAIDMWAEAzUwBAIDtAQCB/QEAg7UAAGgMAICM3QEAbgwAgMwIHgCM8QYAzDgBAM08AQBRHgCAiREAAIEFBgBJHgCAYR4AgFkeAIBpHgCAgz0AAIAhAACBOQAAgDkAAIEhAAA5HgCAiRwAgMwoAQCB2QYAbwsAgIH9BgDMJAEAmRwAgJEcAICxHACAgCEBAIE1AQCjBQAAuRwAgMEcAIDJHACAzIwFAM1AAgC3HAMAdQsAgIfNBwDZHACA0RwAgB0dAIDNiAAAzJAAAIzdBQCjhQAAFgoAgMzgAgDhHACAiNUHAIFNAACATQAAUQsAgOkcA
IBXCwCAkTkHADcKAICIxQcApQsAgIrJBwDxHACAmz0AAIflBwBxHgCAgYUHAICFBwA6CgCAgvkHAILVBgCDRQAAgMkGAIHdBgCG4QYAewsAgIRRAACJHgCAipUGAIuZBgCIeQAAiZ0GAK0MAICPWQcAjG0HAPkcAIDMgAMAzSQCALARBwA9CgCAgR4AgCEdAIB5HgCAhAsAgICNAACBnQAAzOwDAM3oBAABHQCAigsAgKNJBwCQCwCACR0AgKO9BwARHQCAGwAAgOcHAIALAACApKUHAOsEAICKBQCAAwAAgKhhBwDZDQCAZQAAgMgDAIAbCQCArWkHAIAtAQCBPQEAgl0BAINRAQCEYQEAuAQAgKwEAICHYQEAiK0BAIm1AQCKvQEAjykVALwFAIAdDACAzHgCAM3YBQCB3QEAgXEAAOQLAICC/QEAhBkAACMMAICH7QEAIAwAgMw0BADNMAQA5wsAgJ9pFQAmDACAjMkBAM34BADM8AIAsUkBACEHAICB1QAAoxUBAKCZFQBzCACARgcAgIT1AADMKAQAzSwEAMMIAICveQEAqH0BADENAICqaQEAUgkAgLQlAQC1KQEAowkBAAIMAIDqBgCA7gYAgLIFAQCzPQEAvPUAAL39AAC+2QAAOAgAgLgBAQC5AQEAugEBADwHAIBDBwCAhgwAALOdAwCyiQMAswgAgIC9AwBpBwCAbAcAgBIJAIDkBgCA5wYAgDUIAICJhQMAzOQHAL+hAwAFDACA1wwAgIxlAADN5AwAzCQMAIlBAACIVQAAi0UAAIpFAACFtQMAhLUDAIeVAwCGgQMAAQ0AgAQNAIAHDQCAmCwAABMAAICmyAAAzYwGAMyoBgCFaQAAFwAAgDEAAIBpAACAzPADAAcAAIA1AACA0QwAgLGVAAAlDQCAs5UAALKVAAA1DQCAOA0AgEANAIA7DQCALg0AgHUAAICmBgCAJQAAgJgJAIAdIQCAv1UDAEMNAIAZIQCAFSEAgGEgAIC4bAAAlGUNAJIAAgCcrQEAnaUBAJqJAQCbiQEAmJkBAJmJAQDMIAYAzQQGAMxABgDNXAYAzDwHAM04BwDMvAcAhXUAAIABDwCBDQ8AaSAAgLqZAQCFBQAAcSAAgFkgAIC+hQEAgSkPAIAlDwBlIACAgiEPAIUpAAC0pQEAhREAAG0gAICziQ8AsoUPALHJAQCwAQwAt4EPALbtAQC17QEAtO0BAIFlAQCAZQEAg2EBALi1DwDMPAsAhHkBAIDhDwCB3Q8AdSAAgF0gAIDMyAQAzbgEAIWtAACFFQAAISEAgDkhAIDM6BkAzbQZAKRdAQBGDQCAok0CAKPxDwCgVQEAod0PAH8IAIBuCQCAOwkAgO0eAIBsCQCA9R4AgHcJAIDxHgCAsQgAgJMNAACtHgCA+R4AgITVDACF6Q4AlGkAAIfdDgC1HgCAmbQCAL0eAIDFHgCAsR4AgD0hAIC5HgCAn3QBAMEeAICRGA0AgI0OAIGBDgCGhQ4AlYwDAISJDgCXRAIAghEAAKm4AACA0QAAge0AAMkeAIBJDQCA5R4AgIVZDwCDiQAAoTQNAIFFDgCASQ4A6R4AgKU0AQCFYQ8AzPAUAB0fAIC5xAUAzMgDAM3cAwCA3QAAgcEAACUfAIC/kAUAhREAALHsBwCA9QAAgcEAAKEgAIC1jAYALR8AgLdABgCA3Q4AgekOAMwoAgDNtAIAgM0OAIH5DgCFKQAAg4UBAIB1AQCBsQEAgPEBAIHVAQCpIACANR8AgIUFAACxIACAgJkBAIG9AQCCfQAAk9UBAJThAQCFDQAAmSAAgCEfAICACQAAgRkAACkfAICTrQEAlC0AAKUgAICFDQAAMR8AgIUFAACtIACAOR8AgIUpAACCGQAAhTUAAIDxAACB4QAAtSAAgJ0gAIBBIQCAhQUAAGEhAICDdQEAgO0BAIEpAQDM8AEAzbABAEwNAIBdIQCAWSEAgKMNAIBdHwCAZR8AgIA9AACBDQAAbR8AgHUfAICALQAAgR0AAIIVAABhHwCAzSwBAGkfAIBxHwCAeR8AgIjFAwClIQCAzJACAM28AgCE7QMATw0AgIb5AwCdHwCAgIEDAIH9AwCAPQAAgTUAAIFJAACAQQAAzdwBAIJBAAClHwCAoR8AgKkfAIDNMAEAlJ0DAI0hAIDN8AEAzAwBAIG5AwCAxQMAg6EDAJOlAwCArQAAgdUAAICdAACBqQAAiSEAgFINAICBwQAAgMkAAIC1AACBgQAAhSEAgINpBADMcAMAzbQDAIEhAIDNPAEApg0AgJMBBADNjAIAzPQCAIANAACBNQAAlNkGANEfAIDVHwCA2R8AgMwIAQDNHAEAgREAAIApAACpIQCAghkAAICRAQCBkQEAzWgFAMyUAgDMEAkAzSgWAMxYDgDNeA4AzBQNAM3YCgDMKAwAzYwNAMzgFwDM4AoAzDgLAM30CACFEQAAVQ0AgIBRBwCBUQcA4SAAgM2QDgCFBQAA6SAAgMzYDgDN7AEA8SAAgM0ADgCFGQAAzfAPAM08DgDNVA4AzGgBAM1sAQDZIACAYQgAgJSZBwDMwDsAgGEBAIHZAACFKQAAzWQOAMx4AQDNfAEAga0HAICtBwCFZQAAgp0HAIBRAQCBUQEAlOEHAM3AAACEeQEAk8UHAIZhAQDlIACAiCEBAIUNAADtIACAzRgBAMzYAADNtAAAgN0HAIHNBwCZHwCAhQkAAM0fAID1IACA/R8AgN0gAIAFIACADSAAgBUgAIAJIACAASAAgK0hAIARIACAGSAAgMy4AgDNHAMAgGUAAIF1AACCfQAAHSAAgIUJAACFQQAAASEAgKkNAICAmQYAgSEHAIUZAACDfQAACSEAgIVZAAD9IACA+SAAgIDNAACB2QAAjR4AgIURAACE6QAAlR4AgIblAABBIACAgDUAAIENAACdHgCAhR0AAEkgAIClHgCAhQUAAFEgAICAVQAAgW0AAIJ9AACTRQAAlA0AAIUNAAA5IACAkR4AgIAJAACBEQAAmR4AgIUdAABFIACAoR4AgIUFAABNIACAgOkBAIHxAQCCBQAAqR4AgIUJAACFCQAAVSAAgD0gAICAbQEAgXkBAIIZAACDpQEADSEAgIV1AACFBQAAESEAgAUhAIAhIACAzMgCAM3cAgCsDQCAzR4AgIA5AACBOQAA1R4AgN0eAIDRHgCA2R4AgIAdAACBDQAA4R4AgCUgAICAxQAAgdUAAM3AAADMJAIAgNUAAIHFAACFOQAAg8kAACUhAICvDQCAgNUAAIEJAACFBQAALSEAgP0eAICBIACAgAkAAIERAAAFHwCAk5kAAJS5AAANHwCAhWUAAIU9AACJIACAk10AABUfAICFEQAAzXAFAMx0BQCUATwAkSAAgHkgAIDNKAEAhSAAgI0gAICFGQAAlSAAgH0gAIA1IQCAKSEAgCkgAICFJQAAhTkAAMz4AgDNxAMAzTwBALINAICBlQMAgI0DAM3EAQCCpQMAhVEAAIVJAADMKAEAzSwBAM04AQDMPAEAgGk+AIFpPgBJ
IQCARSEAgM04PADMVDwAgdE8AJOdPgDMSAEAzcgCAM00AQBNIQCAlLk+AFgNAICAoT4AgaE+AIKhPgCIjTwAVSEAgIWtAACALQAAgSEAAIXVPwCVHwCAgO0AAIHxAACGpQAARR8AgISpAADNJAEAzSgBAE0fAICI+T4AhfE/AFUfAIBJHwCAhcU/AM0wAQDNEAEAzfQGAIDdAQCB6QEAzbwGAM1wBgDM4AYAzVwBAMxoBgDNkAYAzWQGAM14BgDMrAcAzagHAMzoBwDNyAcAgk0/AIP9AgCANQIAgekCAFEfAIBZHwCAgAU9AIV9AQBRIQCALSAAgM0UAQApDgCAge0BAIDhAQDNPAEAgs0BAM0sAQCCdQEAgW0BAIBZAQCAZQEAgcUAAIUfAIDNJAEAzTgBAILxAACB+QAAgFkBAIApAACBcQAAzBgBAM18AQDNLAEAjR8AgIEdAACAHQAAiR8AgJEfAIBxIQCAzSQBAMzkPQDNXA8AzegAAMwMAQCA1QEAgckBAIKZAACD5T8ACR8AgBEfAIAZHwCAMSEAgCMOAIB1IQCAPR8AgDEgAIBBHwCALA4AgIBNPwCBQT8AfR8AgGkhAICBHwCAZSEAgIAlPwCBKT8Ak5E/AIN9AAAmDgCAlEEAAMzYAgDNrAIAbSEAgJNVAACACQAAgR0AALUNAIB9IQCAlEEAAK0fAICAnQAAgaEAAIAdAACBEQAAhKUAALUfAICGpQAAvR8AgIjxAACC0QAAgdkAAIDNAACAJQAAgSkAAIIFAADFHwCAsR8AgLkfAIDBHwCAk7EAAJQRAADJHwCAgB0AAIEVAACAJQAAgS0AAII9AAB5IQCAgO0AAIHRAACCFQAAg4EAAIHQPQA1IACAzCACAM3cAQCFeAIAkSEAgC8OAICZIQCAiRgDAN0fAICALQAAgTUAAIAJAACBbQAA5R8AgMEgAICRsQAAkKkAAJPdOwCSAQQAlaUAAJSVOwDtHwCAlqEAAIUJAACTQQAAySAAgPUfAICFBQAA0SAAgJT1AAC5IACAgLkAAIHdAACC5QAA4R8AgOkfAICF6QAAgAkAAIE1AACFBQAAxSAAgPEfAICFHQAAzSAAgPkfAICFBQAA1SAAgLHBBQCwxQMAvSAAgLLFAwC12QUAtM0DAJ0hAICFOQAAuf0DAKEhAICVIQCAuw0AgM0NAIAXDgCAAR8AgAUOAIDTDQCAzIgCAAsOAIDN4D4AzZABAMwkAQBwDQCAjg0AgEEOAIB9DgCAgLEAAM3UPgDN5D4Agw4AgMy8PgDNuD4AgNEDAIHtAwCC/QMAhmkAAD4OAICFnQMAzTwBADgOAIDM6AIAzTw/AIjlAADNGAEAiQ4AgIhBAAA7DgCAdw4AgM0sAQCVDgCAgNUAAJsOAICG4QAAhukAAEcOAIDNJAEAoQ4AgM0QAQCI0QAAiCkAAMz4AgBNDgCAzfgCAMwkAQCnDgCAhS0DAMygPgDNbD4AgNUDAIHNAwCCAQMAg/kDAMxkAwDNzAIARA4AgM0kAQDMDAIAzQgCAIERAADMnAMAzLA+AM20PgDMxD4AzcA+AMyAPgDNuD4ArQ4AgMyEAgDMmD8AzVA+AMwgPgDNoD4AzQw/AM0wPwDNeD8AzQQ/AIhZAAC/DgCAzfgBAMzEAQBKDgCAxQ4AgMsOAIDMFAIAzAgBAM3IAQCIBQAA0Q4AgNcOAIDMKAIAuQ4AgIgNAACG0QAAgB0BAITNAACI9QAAzDwCAIQ1AQDMRAIAhikBAIAOAICIZQEAhg4AgKdEBQBiDgCAi+0AAIjtAACBDQAAiCUAAIZlAADMcAIAzXQCAMwwAgDN2AUAXA4AgIwOAICAOQAAXw4AgMzgBQB6DgCAzCgBAM0UAQCGJQAAiFUAAAgOAICGhDAAxA0AgIDVBwCG/QcAmA4AgMwkAgCIPQAAng4AgGsOAICIPQAApA4AgMxIAgDNeAIAUA4AgKoOAICXwAUAlnAFAJUYBQCAaQAAk1gFAIE5AACIZQAAkPg8AIZZAACeqAUAhEUAAGgOAIDM1AIAmrQFAIBdAACYrAUAp+wEAIgRAADM2AIAzdwCAKO8BACwDgCAzGACAMIOAIBuDgCAyA4AgK0IBADODgCAq/QEAMwsAgCIBQAA1A4AgLfoAwC2HAQAtSgEAMwAAgCzKAQAi3kAAIh9AACwdAQAhkEAAL6kAwCEdQAAiB0AANoOAIC6TAMAzNwDALj8AwCDqAIAiA0AALwOAICIFQAAh5QCAMw4AgBlDgCAzAQCAIvcAgCPDQAAcQ4AgI8ZAADMIAIAdA4AgI3wAgCIdQAAmCADAJksAwCPDgCAlA0AgMxMAgCWcAMAzCQCAIg9AACSDgCAzCwCAIgFAACzDgCAzCQCAIgNAAC2DgCAh/UAAKjUAwCpxAMA3Q4AgNlgAgDSDwCA1Q8AgNsPAICUNQAAkzEAANloAgDYDwCA2UwCAJQFAADeDwCAlSEAAJQpAABQEACAdBYAgEMXAIDSFgCA2WACADcXAIC12AMAtPADAJQ1AADZWAIAWhcAgJQFAADZVAIAlA0AADEXAIDgdAEAisgAALwVAACIyAAA4IACAIcXAICBoAAApOwCAKTIAgCoXAAAvA0AAJkXAIDghAIAvAUAAJ0XAICk+AIA4PQCALDMAwCV0AAAXRcAgLPgAwCmyAIAp2ACAJLYAABkFwCAvsEAAGsXAICXwQAAchcAgHkXAICAFwCAzXg/AMy8PwC+gA0AixcAgLx4DAC9gA0AuvQMALtUDAC49AwAkhcAgLYXAIC3uAwAuhcAgLWMDACyoAMAs6AMAKEXAICxQAMArnACAK9kAwC4BQMArUgDAKgXAICvFwCAqEQDAKnYAwDaFwCAp9gDAKRoAgCliAMAtjUDALc9AwCSyAIAtT0DAJldAQCYTQEAm2UBAJppAQCdZQEAnGUBAJ+FAQCemQEAh5wCAL6tAACWpQAAl70AAMw0BQDNjDcAzLg4AM2sOACflQEAth0AAJ2ZAQCc9QEAs7EBAK54AgDhFwCAvhcAgJk9AADFFwCAmxkAAJoJAADMFwCA0xcAgOBIAgCeCQAArFwCAK30AgD6FwCA9hcAgP4XAIDoFwCAh2ADAO8XAICvVAIAvhEAAJcFAAACGACA4KwCAAYYAICG+AMAh+wDAOC0AgAOGACAr0gCAK6QAgDgPAIAvg0AAAoYAICXGQAA4NgCAIaEAwCWEQAAvwAMAJ1tAACcYQAAEhgAgLFMAgCzUAIAlQ0AABYYAICGnAMA4MgCALMEAgCCBQAAIhgAgLNQAgCVDQAAJhgAgBoYAIAeGACA4LQCAIaMAwCH3AMAvg0AAJVpAACWeQAAKhgAgLToAgC1UAIAlwUAADIYAIDg1AIAtPQCAL4ZAADgoAIALhgAgODUAgCZjAMAt9QCAIoFAAA2GACAOhgAgIoVAAC3NAIAjx0AAD4YAIBCGACAswUAAEYYAICzBQAAWxgAgJwJAACdCQAATRgAgFQYAICMBQAAYhgAgG0YAIB0GACAexgAgJ9JAACCGAC
AiRgAgGYYAICQGACAlxgAgNkYAIDPGACA6hgAgOAYAICeGACAg8kBAIH5AQCsGACAsxgAgLoYAIDBGACAyBgAgKUYAICAtAIApYgDAOEIAgCuHQAA8RgAgLwJAACN9QEA9RgAgOEAAgCSlQEA45QQAJNFAACXiQEAhRQAAId4AQCGAAQARjoAgEo6AIBOOgCAUjoAgFY6AICdeQAA74xoAJyhAQBaOgCAXjoAgKKZAABiOgCAZjoAgGo6AIBuOgCAp4kAAHI6AIB2OgCAqUkBAHo6AICsqQAAfjoAgII6AICGOgCAsyUBAIo6AICOOgCAkjoAgLchAQC2OQEAtTEBAJY6AICaOgCAufkAALkRAQC4GQEAnjoAgKI6AICmOgCAqjoAgICwAQCEiAIArjoAgIPIAQCEVAMAhFwEALI6AICEXAUAgN0DAIEtAACCMQAAvjwCALo6AIC+OgCAh4gDAIacBACzLQMAwjoAgMY6AIC+AAQAvhwFALbRAwC12QMAyjoAgLv5AwC68QMAmljTAYTgBwC/xQMAvtkDAL3dAwC83QMAvgAYAKUFAwCmDQMAzjoAgIQcGADSOgCA1joAgKPxAwCsAQMArQEDAK4FAwCvGQMArKQbAq3cGgKqLQMAqyUDAL5MGQC+SBoA2joAgL6AGwC04BoCtdQdArYwHgLvCAIA3joAgOGgAQC6OBoC4/gCALoAAAC9ZBwCvvQcAr8AEAKRBNMBkOT2AeBEAQCSCD4C4joAgOY6AIDqOgCA7joAgL6sHADyOgCA9joAgPo6AID+OgCAAjsAgAY7AIAKOwCAgbBtAICAAQCDHFIAgth3AIUgmgCEkL4AhwjPAIaM5gCJbDcBiOAsAYsYfgGK2BMBjeClAYzwWgGP/OsBjliPAbDVFwCxAWgAso1rALOdawC0SWsAtZVvAA47AIDgcAEAEjsAgBY7AIAaOwCAHjsAgIAZAACBGQAAggUAACI7AIAqOwCAoaUCAKJJBwCjQQcApEEGAKXVGwCm3RsAp8EaAKgBHACp4R8AqkkfAKsBEACs9RMAra0TAK4BFACv+RcAqDEGAKkxBgCqTQYAq0UGAKxNBgCtmQYAro0GAK+FBgCGgAMAhxgDAC47AIAyOwCANjsAgDo7AIA+OwCAQjsAgLhtBwC5dQcAun0HALt1BwC8bQcAvc0HAL75BwC/+QcAsKkGALGFBgCyeQcAs3kHALRpBwC1aQcAtl0HALdVBwC2OgCAs8EGAEY7AIAmOwCAth0GAEo7AIBOOwCAtcEGALppBgC7RQYAUjsAgFY7AIC+qQcAv6kHALypBwC9qQcAo4UGAFo7AIBeOwCAYjsAgGY7AICmWQYApYUGAGo7AICrAQYAqi0GAG47AIByOwCAr+0HAK7tBwCt7QcArO0HAKjBBgCpLQEAqiUBAKs9AQCsJQEArS0BAK4lAQCvlQEAdjsAgHo7AIB+OwCAgjsAgIY7AICCvQAAgb0AAIC9AAC4nQEAua0BALqlAQC7bQAAvHUAAL19AAC+dQAAv20AALD1AQCx/QEAssEBALPBAQC0tQEAtb0BALa1AQC3rQEAijsAgI47AICSOwCAs6EBAJY7AIC1oQEAtqEBAJo7AICGgAEAh8QBALo9AQC7NQEAvBkBAL0ZAQC+fQEAv3UBAKPtAQCeOwCAojsAgKY7AICqOwCApu0BAKXtAQCuOwCAq3kBAKpxAQCyOwCAtjsAgK85AQCuMQEArVUBAKxVAQC6OwCAvjsAgMI7AIDGOwCAyjsAgOGsAQDOOwCA42AGANI7AIDWOwCA2jsAgO9UBgDeOwCA4jsAgL60GgDmOwCA6jsAgO47AICGaBwAh4wDAPI7AID2OwCA+jsAgP47AICAOQAAgTkAAIIFAAACPACACjwAgA48AIASPACAFjwAgKgdAwCpQQMAqkEDAKtBAwCsQQMArUkDAK5xAwCvcQMAhCAdABo8AIAePACAIjwAgCY8AIAqPACALjwAgDI8AIC46QAAufUAALr9AAC78QAAvJEAAL2RAAC+iQAAv4kAALDhAACx4QAAsuEAALPhAAC04QAAte0AALbZAAC32QAA4wwHAOEgBwDhMAEA4wgHADY8AIA6PACAPjwAgEI8AIBGPACASjwAgE48AIBSPACA75gHAFY8AIBaPACA74gHALOJAgBePACAYjwAgL6AGgBmPACAtokCALWJAgBqPACAu2UBALplAQBuPACAcjwAgL9pAQC+ZQEAvXUBALx1AQC3PQYAtj0GALU9BgC0IQYAszUGALI1BgCxAQYAsAkGAL9ZBgC+UQYAvVkGALxNBgC7bQYAunkGALlxBgC4eQYAgJ0AAIGtAACCpQAAejwAgH48AICCPACAhjwAgIo8AICvcQYArmkGAK1tBgCsbQYAq4EGAKqZBgCpkQYAqJkGAAY8AIB2PACAjjwAgKPFHQCSPACApcUdAKbFHQCWPACAhgADAIdkAwCqKR4AqykeAKw5HgCtOR4ArikeAK8lHgCzOR4AmjwAgJ48AICiPACApjwAgLb9HgC1/R4AqjwAgLvZHgC60R4ArjwAgLI8AIC/aR8AvmEfAL1pHwC8wR4AqPEeAKnxHgCq8R4Aq/EeAKw1HgCtPR4ArjUeAK8tHgC2PACAujwAgL48AIDCPACAxjwAgMo8AIDOPACA0jwAgLjlHwC57R8AuuUfALv5HwC86R8AvZEfAL6RHwC/jR8AsFUeALFdHgCyVR4As/0fALTlHwC17R8AtuUfALfdHwCjeR8A1jwAgNo8AIDePACA4jwAgKa9HwClvR8A5jwAgKuZHwCqkR8AhogAAIdMAQCvKR4AriEeAK0pHgCsgR8AgEkAAIFJAACCWQAAs5keAOo8AIC1iR4AtlEBAO48AIDyPACA9jwAgLotAQC7JQEAvD0BAL0lAQC+JQEAvxUBAKhNHgCpVR4Aql0eAKtVHgCsTR4ArZ0BAK6JAQCvgQEAhKwBAPo8AID+PACAAj0AgAY9AIAKPQCADj0AgBI9AIC4ZQEAuW0BALplAQC7fQEAvGUBAL1tAQC+ZQEAv9kAALClAQCxrQEAsqUBALO9AQC0rQEAtZ0BALaVAQC3XQEAo9UdABY9AIAaPQCAHj0AgCI9AICmHQIApcUdACY9AICraQIAqmECACo9AIAuPQCAr1kCAK5pAgCtaQIArHECADI9AIA2PQCAOj0AgD49AIBCPQCARj0AgEo9AIBOPQCAgDkAAIE5AACCBQAAUj0AgFo9AIBePQCAh0ADAIZcBACETAQAYj0AgGY9AICEBAUA4yABAGo9AIDhqAEAbj0AgO+UGgByPQCAdj0AgHo9AIB+PQCAgj0AgIY9AICKPQCAs6EDAI49AICSPQCAlj0AgJo9AIC2fQMAtX0DAJ49AIC7WQMAulEDAKI9AICmPQCAv/0AAL79AAC9/QAAvEEDAKhRAgCpWQIAqmkCAKtpAgCstQIArb0CAK61AgCvrQIAhKgHAKo9AICuPQCAsj0AgIKpAAC2PQCAgKkAAIGpAAC4aQEAuW
kBALoJAQC7CQEAvBkBAL0ZAQC+CQEAvwkBALDVAgCx3QIAstUCALNpAQC0eQEAtXkBALZpAQC3YQEA4bgBAOHUHwDjOB8A4wwbALo9AIC+PQCAwj0AgMo9AIDOPQCA0j0AgNY9AIDaPQCAvjwJAN49AIDvhBsA74QbAKOhAgDiPQCAhugEAIe8BQDmPQCApn0CAKV9AgDqPQCAq1kCAKpRAgDuPQCA8j0AgK/9AQCu/QEArf0BAKxBAgCzhQYAxj0AgPY9AID6PQCA/j0AgLaJBgC1jQYAAj4AgLuRBgC6iQYABj4AgAo+AIC/9QYAvokGAL2BBgC8iQYADj4AgBI+AIAWPgCAGj4AgB4+AIAiPgCAJj4AgO+EHQAqPgCA4QAEAC4+AIDj/AQAgBEAAIEdAACCBQAAMj4AgKjxBgCp8QYAqg0GAKsFBgCsBQYArQkGAK49BgCvNQYANj4AgDo+AICGiAAAhxADAD4+AIBCPgCARj4AgEo+AIC4EQYAuRkGALohBgC7IQYAvPUHAL39BwC+9QcAv+kHALBNBgCxVQYAsl0GALNVBgC0TQYAtTEGALYxBgC3MQYAo4UHAE4+AIBSPgCAVj4AgFo+AICmiQcApY0HAF4+AICrkQcAqokHAGI+AIBmPgCAr/UHAK6JBwCtgQcArIkHAGo+AICz4QYAbj4AgHI+AIC25QYAdj4AgHo+AIC18QYAur0GALuNBgB+PgCAgj4AgL59AQC/ZQEAvJUGAL11AQCoHQYAqSUGAKotBgCrJQYArD0GAK0hBgCuXQYAr00GAIY+AICKPgCAjj4AgJI+AICWPgCAgrkDAIGxAwCAuQMAuO0BALmFAQC6jQEAu4UBALydAQC9hQEAvo0BAL+FAQCwPQYAsQ0GALIFBgCz5QEAtP0BALXlAQC25QEAt9UBAKOlBQCaPgCAnj4AgKI+AICqPgCApqEFAKW1BQCuPgCAq8kFAKr5BQCGCAwAhxwDAK8hAgCuOQIArTECAKzRBQCyPgCAs/ECALY+AIC6PgCAtlUDAL4+AIDCPgCAteECALpxAwC7eQMAxj4AgMo+AIC+MQMAvz0DALxRAwC9UQMAqCUCAKk1AgCqPQIAqzUCAKwtAgCtkQMArpEDAK+RAwDOPgCA0j4AgNY+AIDaPgCArAAAAN4+AIDiPgCA5j4AgLiZAwC5rQMAuqUDALttAwC8dQMAvX0DAL51AwC/bQMAsPEDALH5AwCywQMAs8EDALSxAwC1vQMAtrUDALepAwDqPgCA7j4AgPI+AID2PgCA+j4AgP4+AIACPwCA76gaAL5oDADhlAEABj8AgOMcBgCADQAAgXEAAIJxAAAKPwCAo/UDAA4/AIASPwCAhEwCABo/AICmUQIApeUDAB4/AICrfQIAqnUCAIbIDACHLA0ArzkCAK41AgCtVQIArFUCAOFQBgAiPwCA4xQHAITADAAmPwCAKj8AgC4/AIAyPwCANj8AgDo/AIA+PwCAQj8AgEY/AIBKPwCA73gbAL74DwBOPwCAUj8AgFY/AICzjQEAWj8AgLWZAQC2jQEAXj8AgFY9AIBiPwCAuoUBALtNAQC8VQEAvV0BAL5VAQC/SQEAo0EOABY/AIBmPwCAaj8AgG4/AICmQQ4ApVUOAHI/AICrgQ4AqkkOAHY/AIB6PwCAr4UOAK6ZDgCtkQ4ArJkOAIBtAACBCQAAgh0AAH4/AIDvGAkAgj8AgIY/AICKPwCA4zwNAI4/AIDhWAwAkj8AgIbQAACHvAMAlj8AgJo/AICokQ4AqZkOAKrJDgCrxQ4ArN0OAK3BDgCuwQ4Ar/UOAIToAACePwCAoj8AgKY/AICqPwCArj8AgLI/AIC2PwCAuMEPALnBDwC6wQ8Au8EPALzBDwC9wQ8AvsEPAL/1DwCwjQ4AsUUOALJNDgCzRQ4AtF0OALVBDgC2QQ4At0EOAKhRDgCpWQ4Aqo0OAKudDgCshQ4ArY0OAK6FDgCvvQ4Auj8AgL4/AIDCPwCAxj8AgMo/AIDOPwCA0j8AgNY/AIC4kQ4AuZkOALqtDgC7RQEAvF0BAL1FAQC+RQEAv3UBALDFDgCxzQ4AssUOALPdDgC0xQ4AtbUOALa9DgC3tQ4AswUOANo/AIDePwCA4j8AgOY/AIC2DQ4AtQ0OAOo/AIC7CQ4AugEOAO4/AIDyPwCAv3EOAL4BDgC9CQ4AvBEOAIJtAACjQQ4AgFUAAIFlAACmSQ4A+j8AgP4/AIClSQ4AqkUOAKtNDgCGSAAAh3gAAK5FDgCvNQ4ArFUOAK1NDgCoXQIAqWECAKplAgCrdQIArG0CAK2xAgCusQIAr7ECAITsBAACQACABkAAgApAAIAOQACAEkAAgBZAAIAaQACAuHEDALlxAwC6cQMAu3EDALzVAwC93QMAvtUDAL/NAwCw0QIAsdECALLRAgCz0QIAtFEDALVRAwC2UQMAt1EDAB5AAICz6QIAIkAAgL6ABAC2NQIAJkAAgCpAAIC14QIAuhECALsRAgAuQACAMkAAgL6RAwC/kQMAvAECAL0BAgA2QACAOkAAgKOlAgA+QACApa0CAEJAAIBGQACApnkCAEpAAIBOQACAq10CAKpdAgCtTQIArE0CAK/dAwCu3QMAqNUCAKndAgCqLQEAqyUBAKw9AQCtJQEAri0BAK8lAQBSQACAVkAAgFpAAIBeQACAYkAAgGpAAIBuQACAckAAgLiFAQC5iQEAup0BALuVAQC8sQEAvbEBAL55AAC/eQAAsF0BALHlAQCy4QEAs/kBALTpAQC13QEAttUBALe9AQDh8A4AdkAAgOMUDgB6QACAgb0AAIC9AAB+QACAgq0AAIYABACH7AUAgkAAgIZAAICKQACAjkAAgO9gDgCSQACAlkAAgJpAAICFXH0AnkAAgKJAAIDjZAEApkAAgOG0AQCqQACA76AOAK5AAICmPgCAhPgFALJAAIC2QACAukAAgLMlBgBmQACAvkAAgMJAAIDGQACAtiUGALU1BgDKQACAu6EGALoZBgDOQACA0kAAgL+ZBgC+rQYAva0GALy1BgCCbQAA7zAEAIBVAACBZQAAvlwDANZAAICG+AAAh2wDANpAAIDeQACA4kAAgOZAAIDqQACA40QEAO5AAIDhjAcAo6UGAPJAAID2QACA+kAAgP5AAICmpQYApbUGAAJBAICrIQYAqpkGAAZBAIAKQQCArxkGAK4tBgCtLQYArDUGAA5BAICz+QcAEkEAgBZBAIC2SQcAGkEAgB5BAIC1UQcAulEHALtRBwAiQQCAJkEAgL41BwC/OQcAvEUHAL09BwCoNQYAqT0GAKo1BgCriQYArJ0GAK2NBgCusQYAr7EGACpBAIAuQQCAMkEAgDZBAICADQAAgbEAAIKxAAA6QQCAuKEGALmtBgC6vQYAu7UGALytBgC9XQEAvlUBAL9NAQCw0QYAsdEGALLVBgCzrQYAtLUGALW5BgC2qQYAt6UGAKO9BgA+QQCAQkEAgISEAgC+kAEApg0GAKUVBgBKQQCAqxUGAKoVBgCGCAAAh3wBA
SgACAVoAAgFqAAIBegACAYoAAgGaAAIC43Q4AueEOALrhDgC74Q4AvOUOAL3pDgC+mQ4Av5UOALBRDgCxUQ4AslEOALPpDgC0/Q4AteUOALbtDgC35Q4Ao3EPAGqAAIBugACAcoAAgHaAAICmsQ4ApbkOAHqAAICrlQ4AqpUOAH6AAICCgACAryEOAK45DgCtPQ4ArIUOAIaAAICzyQEAioAAgI6AAIC2+QEAkoAAgJaAAIC1wQEAuqkBALu1AQCagACAnoAAgL6tAQC/lQEAvK0BAL2lAQCo5Q0AqfkNAKoFAgCrHQIArA0CAK09AgCuNQIAr10CAKKAAICmgACAqoAAgK6AAICAGQAAgRkAAIIFAACygACAuC0CALk1AgC6MQIAuzECALzVAgC93QIAvtUCAL/NAgCwKQIAsTUCALI9AgCzNQIAtC0CALUVAgC2HQIAtxUCALqAAICEnAIAvoAAgKOBAgDCgACApYkCAKaxAgDGgACAhiAEAIfUAwCq4QIAq/0CAKzlAgCt7QIAruUCAK/dAgC29QMAvkQDAIWM/QG1/QMAyoAAgLP9AwDOgACA0oAAgL59AwC/TQMAvGUDAL19AwC6dQMAu30DANaAAIDagACA3oAAgOKAAICEBAIAoyUCAOaAAIClJQIApi0CAOqAAIDugACA8oAAgKqtAgCrpQIArL0CAK2lAgCupQIAr5UCAPaAAID6gACA/oAAgAKBAIAGgQCA48ADAAqBAIDhrAEADoEAgO9YAwASgQCAFoEAgIANAACB5QAAgu0AABqBAIDhYA8A40ABAOM4DgDheA4AHoEAgCKBAIC+lAUAKoEAgIYABACHZAUALoEAgDKBAIA2gQCA7/wOAO98DgA6gQCAs1EBAD6BAID2fgCAQoEAgEaBAIC2DQEAtQkBAEqBAIC74QAAuhkBAE6BAIBSgQCAv9EAAL7pAAC96QAAvPkAALaAAIAmgQCAVoEAgFqBAIBegQCAYoEAgGaBAIBqgQCAqKEGAKmtBgCquQYAq7EGAKzhBgCt7QYAruUGAK/FBgCwvQYAsUUHALJNBwCzXQcAtE0HALV1BwC2fQcAtx0HALglBwC5LQcAuiUHALs9BwC8KQcAvRUHAL4RBwC/EQcAoxEGAG6BAIBygQCAdoEAgHqBAICmTQYApUkGAH6BAICroQcAqlkGAIKBAICGgQCAr5EHAK6pBwCtqQcArLkHAIANAACBFQAAgh0AAIqBAICOgQCAkoEAgISUAwC+lAMAloEAgJqBAICGyAAAh4wAAJ6BAICigQCApoEAgKqBAIConQYAqa0GAKqlBgCrvQYArK0GAK3RBgCu1QYAr80GAK6BAICygQCAtoEAgLqBAIC+gQCAwoEAgMaBAIDKgQCAuF0BALnBAQC6wQEAu8EBALzBAQC9yQEAvvEBAL/xAQCwvQYAsY0GALKFBgCzZQEAtH0BALVlAQC2bQEAt2UBALMtBgDOgQCA0oEAgNaBAIDagQCAtlEGALUlBgDegQCAu0kGALp5BgDigQCA5oEAgL+hAQC+uQEAvbEBALxRBgDqgQCAo2kGAO6BAIDygQCAphUGAPaBAID6gQCApWEGAKo9BgCrDQYA/oEAgAKCAICu/QEAr+UBAKwVBgCt9QEAutUHALvdBwC4wQcAucEHAL4xBAC/MQQAvPEHAL3xBwCyrQcAs7UHALCtBwCxpQcAtp0HALf1BwC0pQcAtZUHAKppBwCraQcAqGkHAKlpBwCuaQcAr2kHAKxpBwCtaQcAgLkDAIGNAwCChQMAhKgDAIZQ/AGHCAMAvjQDAAqCAICoZQIAqXUCAKp9AgCrdQIArG0CAK21AwCuvQMAr7UDAA6CAIASggCAFoIAgBqCAIAeggCAIoIAgCaCAIAqggCAuFEDALlZAwC6YQMAu2EDALwRAwC9HQMAvhUDAL8JAwCwzQMAsdUDALLdAwCz1QMAtM0DALVxAwC2cQMAt3EDAC6CAIAyggCAs/0DADaCAIC17QMAOoIAgD6CAIC2PQIAQoIAgEaCAIC7GQIAugECAL0JAgC8AQIAv70CAL4BAgBKggCAToIAgITE/QG+wPwBUoIAgFaCAIBaggCA79wDAF6CAIDhlAEAYoIAgOMQAwBmggCAgu0AAIHtAACA7QAA4TgGAOE8BwDjQAEA45QGAGqCAIBuggCAcoIAgHqCAICGgPwBh+j9AX6CAICCggCAhoIAgIqCAIDvnAEA79wGAKM1AwCOggCAkoIAgJaCAICaggCApvUCAKUlAwCeggCAq9ECAKrJAgCiggCApoIAgK91AgCuyQIArcECAKzJAgB2ggCAqoIAgK6CAICyggCA76T9AbaCAIC6ggCAvoIAgON4/QHCggCA4UD8AcaCAIDKggCAzoIAgNKCAIDWggCAs+X+AYItAACBFQAAgB0AANqCAIC25f4BtfX+Ad6CAIC7Yf8Butn+AeKCAICE5AMAv2n/Ab5h/wG9df8BvHn/Aaj9/gGpJf4Bqi3+Aasl/gGsPf4BrSX+Aa4t/gGvJf4BviwAAOaCAICGiAAAh+wAAOqCAIDuggCA8oIAgPaCAIC4gf8BuYH/AbqZ/wG7mf8BvIn/Ab21/wG+sf8Bv63/AbBd/gGx5f8Bsu3/AbPh/wG05f8Bte3/AbbZ/wG32f8Bo6X/AfqCAID+ggCAAoMAgAaDAICmpf8BpbX/AQqDAICrIf4Bqpn/AQ6DAIASgwCAryn+Aa4h/gGtNf4BrDn+ARaDAICz6f4BGoMAgB6DAIC2lf4BIoMAgCaDAIC16f4BurH+Abu5/gEqgwCALoMAgL51AQC/fQEAvJH+Ab2R/gGoHf4BqS3+Aaol/gGrPf4BrCX+Aa1R/gGuUf4Br1H+ATKDAIA2gwCAOoMAgD6DAIBCgwCARoMAgEqDAIBOgwCAuNkBALnZAQC67QEAu+EBALzhAQC94QEAvuEBAL/hAQCwMf4BsTn+AbIB/gGzAf4BtPUBALX9AQC29QEAt+kBAKOt/QFSgwCAvkwDAFqDAIBegwCAptH9AaWt/QFigwCAq/39Aar1/QFmgwCAaoMAgK85AgCuMQIArdX9AazV/QGA+QMAgfkDAIJNAACFdCAAboMAgITYAwCE1AQAcoMAgIZABACHVAMAdoMAgHqDAIB+gwCAgoMAgIaDAIC+8AUAqDECAKkxAgCqMQIAqzECAKyVAwCtnQMArpUDAK+NAwCKgwCAjoMAgJKDAICWgwCAhHwHAJqDAICegwCAooMAgLipAwC5qQMAumkDALtpAwC8eQMAvXkDAL5pAwC/aQMAsP0DALHNAwCyxQMAs60DALS5AwC1uQMAtq0DALelAwCmgwCAqoMAgK6DAICygwCAtoMAgLqDAIDv6AMAvoMAgOGQAQDCgwCA42wDAMqDAICAJQAAgSkAAIIdAADOgwCAs/kDANKDAICGaAcAh1wFANaDAIC2XQIAtV0CANqDAIC7SQIAunkCAN6DAIDigwCAvz0CAL49AgC9OQIAvFECAOaDAIDhPP4BvkAGAOPwAQDqgw
CA7oMAgPKDAID2gwCA+oMAgP6DAIAChACABoIAgAaEAIAKhACADoQAgO/kAQAShACAFoQAgKNxAwAahACApdUCAB6EAIAihACAptUCACaEAIAqhACAq8ECAKrxAgCtsQIArNkCAK+1AgCutQIA4dz8AcaDAIDjUAQA74gEAID1BwCBCQAAgj0AAC6EAICEJAEAMoQAgDaEAIA6hACAPoQAgOFMBADv5BwA43QEALNdBgBChACAhgAMAIfgAwBGhACAtgUGALV1BgBKhACAuxEGALoJBgBOhACAUoQAgL/VBgC+1QYAvQEGALwJBgCojQYAqZUGAKqVBgCrpQYArL0GAK3FBgCuxQYAr/UGAFaEAIBahACAXoQAgGKEAIBmhACAaoQAgG6EAIByhACAuHUGALl9BgC6dQYAu80HALzVBwC93QcAvtUHAL/NBwCwjQYAsZUGALKdBgCzlQYAtFEGALVRBgC2UQYAt1EGAKMdBwCPFewBdoQAgHqEAIB+hACApkUHAKU1BwCChACAq1EHAKpJBwCGhACAioQAgK+VBwCulQcArUEHAKxJBwCeRfkBn6X5AZyR/QGdTfkBmlX9AZtd/QGYBfEBmZX+AZal8gGXYfEBlG31AZU19QGS4ekBk4X2AZBV7AGRXekBsbEdALClHQCziRkAskEcALUBJAC09RkAjoQAgJKEAICWhACAgqkDAIGhAwCAaQAAohUFAKMFAgCgFQYAob0FAKHFAQCahACAo80NAKLlAQClAQgApN0NAKfRCQCm2QkAqQEUAKilCACrxRQAqs0VAK3REQCsARAArwEcAK51EQCCEe8BgynvAZ6EAICihACAhuH1AYcR9gGEOeoBhY3qAYp59gGL4fEBvqQMAKqEAICO+f0BjzH+AYw98gGNYfIBkkn+AZOd/gGHCAwAhmwMAJax+gGX+QUAlFn6AZVZ+gGaYQYAm8EGAK6EAICyhACAtoQAgLqEAICcyQEAvoQAgKitBQCpuQUAqs0FAKvdBQCszQUArf0FAK71BQCvHQUAwoQAgMaEAIDKhACAzoQAgNKEAIDWhACA2oQAgN6EAIC4dQUAuX0FALoJBQC7CQUAvB0FAL0BBQC+AQUAvz0FALBxBQCxcQUAsnEFALNxBQC0UQUAtVEFALZRBQC3TQUAs0UEAOKEAIDmhACA6oQAgO6EAIC2fQQAtUUEAPKEAIC7tQQAurUEAPaEAID6hACAv5UEAL6VBAC9pQQAvKUEAP6EAICjAQQAAoUAgAaFAICmOQQACoUAgA6FAIClAQQAqvEEAKvxBAAShQCAhOwNAK7RBACv0QQArOEEAK3hBADh0AYAhAwMAOMoBwC+AAwAGoUAgO9EAwCGuAwAhywNAB6FAIDjlAEAIoUAgOH8AQBWgwCAJoUAgO/IBgAqhQCALoUAgDKFAICzjQMANoUAgLWNAwA6hQCAPoUAgLa1AwBChQCARoUAgLtBAwC6SQMAvUEDALxZAwC/QQMAvkkDAKNFDACmhACAFoUAgEqFAIBOhQCApn0MAKVFDABShQCAq4kMAKqBDABWhQCAWoUAgK+JDACugQwArYkMAKyRDACAFQ8AgR0PAIIhDwCzIQ4AXoUAgLUhDgC2JQ4AYoUAgGaFAIBqhQCAusEOALvBDgC8wQ4AvcEOAL7BDgC/wQ4AqK0OAKntDgCq5Q4Aq/0OAKzlDgCt6Q4ArjkOAK85DgBuhQCAcoUAgHaFAIB6hQCAgB0AAIEJAACCvQEAfoUAgLjNDwC51Q8AutUPALvlDwC8/Q8AvZUPAL6RDwC/kQ8AsEkOALFJDgCyWQ4As1kOALRJDgC1SQ4Atv0PALf1DwCjbQ8AgoUAgL6EAQCKhQCAjoUAgKZpDwClbQ8AkoUAgKuNDwCqjQ8AhogAAIdsAQCvjQ8Aro0PAK2NDwCsjQ8AloUAgLPtDgCahQCAnoUAgLaRDgCihQCApoUAgLXhDgC6tQ4Au70OAKqFAICuhQCAvn0BAL9lAQC8mQ4AvZkOAKgRDgCpJQ4AqiEOAKs5DgCsLQ4ArVUOAK5dDgCvUQ4AhKgAALKFAIC2hQCAuoUAgL6FAIDChQCAxoUAgMqFAIC47QEAuZUBALqVAQC7rQEAvLUBAL11AQC+fQEAv3UBALA1DgCxPQ4AsgkOALMJDgC0/QEAteUBALblAQC31QEAo6kNAM6FAIDShQCA1oUAgNqFAICm1Q0ApaUNAN6FAICr+Q0AqvENAOKFAIDmhQCAryECAK45AgCt3Q0ArN0NAIANAACBFQAAgh0AAOqFAIDuhQCA8oUAgIeQAwCGfAQAvuwEAPqFAID+hQCAAoYAgAaGAIAKhgCADoYAgBKGAICyLQ4AszUOALAtDgCxJQ4Ati0OALedDwC0LQ4AtSUOALq9DwC7jQ8AuKUPALm9DwC+LQ8AvxUPALyVDwC9JQ8AFoYAgBqGAIAehgCAIoYAgCaGAIAqhgCALoYAgDKGAICqpQ4Aq7UOAKjFDgCp3Q4Arp0OAK9VDgCspQ4ArZUOAKgNAgCpFQIAqhUCAKtNAgCsWQIArVkCAK5NAgCvRQIAhKgFADaGAIA6hgCAPoYAgIS4BABChgCARoYAgEqGAIC4/QIAuUEBALpBAQC7QQEAvEEBAL1JAQC+cQEAv3EBALAJAgCxCQIAss0CALPFAgC03QIAtcUCALbNAgC3xQIA4dQPAOMQDgDj9A4A4QwOAE6GAIBShgCAVoYAgFqGAIBehgCAYoYAgL4kBABqhgCA7AAAAO9EAADvzA4AboYAgIJlAACz2QIAgFUAAIFtAAC2nQIAcoYAgHaGAIC1lQIAuokCALuJAgCGqAQAh+AEAL5dAgC/RQIAvF0CAL1VAgCjHQUA9oUAgGaGAIB6hgCAfoYAgKZZBQClUQUAgoYAgKtNBQCqTQUAhoYAgIqGAICvgQUArpkFAK2RBQCsmQUAjoYAgLMpBgCShgCAloYAgLYpBgCahgCAnoYAgLUpBgC6pQYAu60GAKKGAICmhgCAvqUGAL+tBgC8tQYAva0GAKjlBgCp7QYAquUGAKv9BgCs5QYAre0GAK7lBgCvXQYAqoYAgK6GAICyhgCAtoYAgLqGAIC+hgCAwoYAgMaGAIC46QcAuekHALr9BwC79QcAvO0HAL1FBwC+TQcAv0UHALAlBgCxLQYAsiUGALM9BgC0JQYAtS0GALYlBgC32QcAo20HAIItAACBFQAAgB0AAMqGAICmbQcApW0HAM6GAICr6QcAquEHANKGAIC+oAEAr+kHAK7hBwCt6QcArPEHANaGAICzkQYAhugAAIcsAQC2QQEA2oYAgN6GAIC1UQEAuk0BALslAQDihgCA5oYAgL4lAQC/LQEAvDEBAL0xAQCwrQEAscUBALLBAQCzwQEAtMUBALXNAQC28QEAt/EBALgBAQC5AQEAugEBALsBAQC8AQEAvQEBAL4BAQC/AQEA6oYAgO6GAIDyhgCA9oYAgIaFAID6hgCA/oYAgAKHAICoTQYAqVkGAKo9BgCrNQYArP0BAK3lAQCu5QEAr
9UBAKPVBQAGhwCACocAgA6HAIAShwCApgUCAKUVAgAWhwCAq2ECAKoJAgAahwCAHocAgK9pAgCuYQIArXUCAKx1AgAihwCAJocAgCqHAIAuhwCAMocAgOFkBQA2hwCA4+wFAIARAACBEQAAghEAAO/0BgA6hwCAPocAgEKHAIC+MAMAhMQCAEqHAICz4QMAhMAcALVRAwBOhwCAUocAgLZZAwBWhwCAWocAgLtxAwC6eQMAvbUAALxpAwC/tQAAvrUAAF6HAIDhlAEAYocAgONcAgCGcBwAh0QDAGaHAIBqhwCAbocAgHKHAIB2hwCAeocAgH6HAICChwCAhocAgO94AgCoVQIAqV0CAKphAgCrYQIArNECAK3RAgCu0QIAr9ECAIqHAICOhwCAkocAgJaHAICahwCAnocAgKKHAICmhwCAuGkBALlpAQC6CQEAuwkBALwZAQC9GQEAvgkBAL8FAQCwtQIAsb0CALK1AgCzaQEAtHkBALV5AQC2aQEAt2EBAOHEBwDjpAYA47gGAOF8BgCADQAAgTUAAII9AACqhwCArocAgLKHAIC+4B0AuocAgL6HAIDvYAAA7+gGAMKHAICjqQIAxocAgMqHAIDOhwCA0ocAgKYRAgClGQIA1ocAgKs5AgCqMQIAhkgcAIfMHACv/QEArv0BAK39AQCsIQIAqIUeAKmRHgCqkR4Aq60eAKy1HgCt1R4ArtEeAK/FHgC2hwCA2ocAgN6HAIDihwCA5ocAgOqHAIDuhwCA8ocAgLhhHwC5YR8AumEfALthHwC8YR8AvWEfAL5hHwC/YR8AsL0eALGFHgCyjR4As4UeALSdHgC1hR4Ato0eALeFHgCzGR4A9ocAgPqHAID+hwCAAogAgLZVHgC1PR4ABogAgLtBHgC6eR4ACogAgA6IAIC/QR4AvlkeAL1RHgC8WR4AEogAgKNdHgAWiACAGogAgKYRHgAeiACAIogAgKV5HgCqPR4AqwUeAISkAwC+qAMArh0eAK8FHgCsHR4ArRUeAKitHgCptR4AqrUeAKvJHgCs2R4ArdkeAK7JHgCvwR4AgO0BAIHxAQCC8QEAJogAgIaQAACHdAEAKogAgC6IAIC4yQEAuckBALrZAQC70QEAvPkBAL35AQC+mQEAv5UBALBFAQCxTQEAskUBALNdAQC0RQEAtU0BALZFAQC3+QEAsz0eADKIAIA2iACAOogAgD6IAIC2WR4AtVEeAEKIAIC7iQEAuoEBAEaIAIBKiACAv4kBAL6BAQC9iQEAvJEBAE6IAIBSiACAo3UeAFaIAIClGR4AWogAgF6IAICmER4ARocAgGKIAICrwQEAqskBAK3BAQCs2QEAr8EBAK7JAQBmiACAaogAgG6IAIByiACAdogAgIQYAgB6iACAfogAgIKIAICGiACAiogAgI6IAICSiACAmogAgJ6IAIC+cAMAgGkAAIFpAACCeQAAhAAEAIbwBACHdAMAoogAgO8MHwCmiACA4aweAKqIAIDj8B4ArogAgLKIAIC2iACAuogAgL6IAIDCiACAxogAgMqIAIDvVAIAzogAgNKIAIDWiACA46QCANqIAIDhgAEA3ogAgOKIAIDmiACA6ogAgO6IAICzRQMA8ogAgPaIAID6iACA/ogAgLZFAwC1VQMAAokAgLshAwC6SQMAvqAEAAqJAIC/KQMAviEDAL01AwC8OQMAqDkCAKk5AgCqjQIAq4UCAKydAgCthQIAroUCAK+1AgCA7QEAgfUBAIL1AQAOiQCAhpAEAIcEBQASiQCAFokAgLhFAQC5TQEAukUBALtdAQC8SQEAvUkBAL55AQC/eQEAsM0CALGlAgCyrQIAs6ECALSlAgC1rQIAtp0CALd9AQAaiQCAHokAgCKJAIAmiQCAKokAgC6JAIAyiQCA74gBAITsBADhVB4ANokAgONUAQA6iQCAPokAgEKJAIBGiQCAo0UCAEqJAIBOiQCAUokAgFaJAICmRQIApVUCAFqJAICrIQIAqkkCAF6JAIBiiQCArykCAK4hAgCtNQIArDkCAKg1BgCpPQYAqlEGAKttBgCseQYArWUGAK5tBgCvZQYABokAgGaJAIBqiQCAbokAgIAZAACBGQAAggUAAHKJAIC45QYAuekGALr5BgC7+QYAvOkGAL3pBgC+nQYAv5UGALAdBgCx5QYAsu0GALPlBgC0/QYAteEGALbhBgC34QYAs9kGAL7QAwB2iQCAeokAgH6JAIC25QYAtfEGAIKJAIC7IQYAutkGAIaYAACHeAMAvyUGAL45BgC9MQYAvDkGAIaJAICjnQYAiokAgI6JAICmoQYAkokAgJaJAICltQYAqp0GAKtlBgCaiQCAnokAgK59BgCvYQYArH0GAK11BgCo7QcAqSkGAKoxBgCrMQYArJEGAK2RBgCukQYAr5EGAKKJAICmiQCAqokAgK6JAICyiQCAtokAgLqJAIC+iQCAuIUGALmNBgC6hQYAu50GALyNBgC9vQYAvrUGAL95AQCw8QYAsfEGALLxBgCzxQYAtMEGALXBBgC2wQYAt8EGALO5BgDCiQCAxokAgMqJAIDOiQCAthEGALUZBgDSiQCAuzUGALo1BgDWiQCA2okAgL8FBgC+BQYAvREGALwlBgClQQYA3okAgOKJAICmSQYAgRUAAIB5AACj4QYAghUAAK1JBgCsfQYAr10GAK5dBgCENAEAlogAgKttBgCqbQYAvswDAOqJAICzlQIA7okAgLXZAgDyiQCA9okAgLbRAgCGgAwAhzgDALvFAgC6xQIAvRUDALwVAwC/FQMAvhUDAPqJAID+iQCA71gGAIRAAwACigCABooAgAqKAIAOigCAEooAgBaKAIAaigCAHooAgOE4BgAiigCA4yQGAL5wDACsSQIArUkCAK5dAgCvVQIAqB0CAKkFAgCqBQIAq10CAISoDAAmigCAKooAgC6KAIC+vA0AMooAgDaKAIA6igCAvE0DAL1VAwC+VQMAv2UDALjpAwC56QMAul0DALtVAwC0yQMAtckDALbZAwC32QMAsBkCALEZAgCy2QMAs9kDAD6KAIDj5AAAQooAgOG8AQBGigCAgj0AAIE9AACAPQAASooAgE6KAIBSigCAWooAgF6KAIDvzAMAYooAgGaKAICj3QMAaooAgIboDACHYA0AbooAgKaZAwClkQMAcooAgKuNAwCqjQMAdooAgHqKAICvXQIArl0CAK1dAgCsXQIAfooAgIKKAICGigCAiooAgI6KAICSigCAlooAgO/gAQCEvAwA4YwGAJqKAIDjHAYAnooAgKKKAICmigCAqooAgLPVAQCuigCAsooAgLaKAIC6igCAtpEBALWZAQC+igCAu70BALq9AQDCigCAyooAgL+dAQC+nQEAvZ0BALydAQCoBQ4AqQkOAKodDgCrFQ4ArFEOAK1RDgCuSQ4Ar0kOAFaKAICCzQ8AgfUPAID9DwDGigCAzooAgIYcAACHsAMAuOkOALnpDgC6/Q4Au/UOALztDgC9VQ8AvlEPAL9NDwCwOQ4AsTkO
ALIJDgCzCQ4AtBkOALUZDgC2DQ4At9kOAKOVDgDSigCA1ooAgNqKAIDeigCAptEOAKXZDgDiigCAq/0OAKr9DgDmigCA6ooAgK/dDgCu3Q4Ard0OAKzdDgDuigCAs/0PAPKKAID2igCAtoEPAPqKAID+igCAtZkPALqNDwC7ZQ8AAosAgAaLAIC+fQ8Av2UPALx9DwC9dQ8AqC0OAKk1DgCqMQ4AqzEOAKxVDgCtRQ4ArkUOAK91DgAKiwCADosAgBKLAIAWiwCAGosAgB6LAIAiiwCAJosAgLjpDgC59Q4Auv0OALv1DgC87Q4AvZEOAL6RDgC/kQ4AsA0OALHlDgCy7Q4As+UOALT9DgC15Q4Atu0OALflDgCjuQ4Agi0AAIEVAACAHQAAKosAgKbFDgCl3Q4ALosAgKshDgCqyQ4AMosAgL4sAQCvIQ4ArjkOAK0xDgCsOQ4AOosAgLZVAQC1RQEANosAgLNVAQA+iwCAhngAAIdcAAC/OQEAvjEBAL0lAQC8JQEAuzEBALpZAQDmiQCAQosAgEaLAIBKiwCAhAQDAKOJAgBOiwCApZkCAKaJAgBSiwCAvyg5AFaLAICqhQIAq+0CAKz5AgCt+QIAru0CAK/lAgDjWAIA78AOAOGIAQBaiwCAXosAgGKLAIBmiwCAaosAgG6LAIByiwCAdosAgHqLAIDvKAIA4ygOAH6LAIDhRA4AqbUCAKhpDQCrAQIAqgkCAK0BAgCsGQIArzECAK4BAgC+AAQAgosAgIaLAICKiwCAjosAgJKLAICWiwCAmosAgLnlAwC45QMAu+UDALrlAwC95QMAvOUDAL/lAwC+5QMAsSECALBJAgCzJQIAsiUCALUpAgC0IQIAtxUCALYVAgCowQIAqdECAKr1AgCrDQEArBUBAK0FAQCuBQEArzkBAJ6LAICiiwCAqosAgK6LAICyiwCAtosAgLqLAIC+iwCAuC0BALk9AQC67QEAu+UBALz9AQC95QEAvu0BAL/lAQCwLQEAsTUBALI9AQCzNQEAtC0BALUVAQC2HQEAtxUBAIA9AQCBpQAAgq0AAO/YAACGsAUAh9gFAMKLAIDv1A8AhGwEAOH0DgDGiwCA4xwPAMqLAIDhlAEAzosAgOMMDgCzPQIA0osAgNaLAIDaiwCA3osAgLbFAQC13QEA4osAgLuxAQC6qQEA5osAgOqLAIC/kQEAvqkBAL2hAQC8qQEAposAgO6LAICqRQYAq10GAKxFBgCtTQYArkUGAK99BgDyiwCA9osAgPqLAICj0QUA/osAgKUxBgCmKQYAAowAgAaMAICCHQAAgR0AAIAdAAAKjACADowAgBKMAIC+lAMAFowAgBqMAICGSAMAh8wDAB6MAIAijACAJowAgCqMAICoqQcAqakHAKq5BwCruQcArKkHAK2pBwCuAQcArzUHAC6MAIAyjACANowAgDqMAIA+jACAQowAgEaMAIBKjACAuC0HALnBAAC66QAAu+kAALz5AAC95QAAvuUAAL+dAACwUQcAsV0HALItBwCzJQcAtD0HALUlBwC2JQcAtxUHALMxBgBOjACAUowAgFaMAIBajACAtikGALUhBgBejACAu5kGALqVBgBijACAZowAgL/hBgC++QYAvfEGALz5BgBqjACAo3UGAG6MAIByjACApm0GAHaMAIB6jACApWUGAKrRBgCr3QYAfowAgIKMAICuvQYAr6UGAKy9BgCttQYAqOUBAKn1AQCq/QEAq/UBAKztAQCtNQEArj0BAK81AQCA+QAAgc0AAILFAACEYAEAvngBAIqMAICHrAAAhpABALjRAAC52QAAuuEAALvhAAC8kQAAvZ0AAL6VAAC/iQAAsE0BALFVAQCyXQEAs1UBALRNAQC18QAAtvEAALfxAACzdQIAjowAgJKMAICWjACAmowAgLa1AgC1ZQIAnowAgLuRAgC6iQIAoowAgKaMAIC/NQMAvokCAL2BAgC8iQIAqowAgKMxAgCujACAhMADAKbxAgCyjACAtowAgKUhAgCqzQIAq9UCALqMAIC+jACArs0CAK9xAwCszQIArcUCAKuNAACqjQAAqY0AAKg5AwCvvQAArr0AAK2FAACsjQAAqgAAAKsAAADCjACAxowAgMqMAIDOjACA0owAgNaMAIC7fQAAun0AALl9AAC4fQAAv90BAL7dAQC93QEAvN0BALO5AACysQAAsaEAALCtAAC3XQAAtl0AALWVAAC0lQAA2owAgN6MAIDijACA5owAgIE1AACADQAA6owAgII1AAC+rD0A7owAgPKMAICFaD0A+owAgP6MAICGODwAh8ACALNJAQACjQCA0AAAAAaNAIAKjQCAtkkBALVJAQAOjQCAuykBALolAQASjQCAFo0AgL8dAQC+HQEAvSEBALwpAQDjNDYA4QwGAOGwAgDjPAYAGo0AgB6NAIAijQCAJo0AgIQsPwC+oD8AKo0AgC6NAIDvfDcAMo0AgDaNAIDvGAEAOo0AgD6NAICGaD4Ah8w/AEKNAIBGjQCASo0AgO+UAABOjQCA4ZQBAFKNAIDjUAAAVo0AgILpPwCB6T8AgPE/AKMJPgCPASQA9owAgFqNAIBejQCApgk+AKUJPgBijQCAq2k+AKplPgBmjQCAao0AgK9dPgCuXT4ArWE+AKxpPgCeYTgAn3U4AJzBNACdtTkAmqU1AJt1NACYeTAAmXExAJYhLQCXhTEAlG0sAJVlLACSeSgAk6UtAJBRJACReSgAsQ0UALAFFACzARgAslUUALV5GAC0tRgAbo0AgHKNAIB2jQCAeo0AgH6NAICCjQCAotE8AKMlAQCgdTkAob08AKHJAACGjQCAowEEAKLlAAClHQQApPUEAKf5CACmAQgAqQEMAKhtCACrzQwAqs0MAK3REACsARAAr9URAK7ZEACCBSUAgy0lAIqNAICOjQCAhsEsAIcRLQCEHSkAhRUpAIopLQCLZSwAko0AgJaNAICOHTAAj8E0AIzZMACNHTEAkmE1AJPNNQCajQCAno0AgJZhOQCXmTgAlKE4AJV9OQCaYT0AmwU9AKKNAICmjQCAqo0AgK6NAICc6QAAso0AgLaNAIC6jQCAvo0AgMKNAICGjACAxo0AgMqNAIDOjQCAqJE+AKmRPgCq7T4Aq+E+AKzhPgCt6T4ArtE+AK/RPgCwUT4AsVE+ALJRPgCzUT4AtHk+ALV5PgC2bT4At2U+ALghPgC5IT4Aujk+ALs5PgC8KT4AvRU+AL4RPgC/DT4AgJkDAIGZAwCCBQAA0o0AgL5UAwDhsD0A2o0AgONAPgCEOAIA3o0AgOKNAIDv9D8A5o0AgOqNAICGmAQAhxwDALMFPQCECAQA7o0AgPKNAID2jQCAtgk9ALUJPQD6jQCAu/U9ALr1PQD+jQCAAo4AgL/dPQC+3T0AveU9ALzlPQAGjgCACo4AgKPNPQC+xAQApcE9AA6OAIASjgCApsE9ABaOAIAajgCAqz09AKo9PQCtLT0ArC09AK8VPQCuFT0AtmkCAB6OAIAijgCAtWkCACa
OAICzSQIAKo4AgC6OAIC+qQMAv6kDALzBAwC9wQMAuvkDALv5AwAyjgCANo4AgKgtAwCpnQMAqpUDAKutAwCstQMArb0DAK61AwCv2QMAgA0AAIEVAACCHQAAOo4AgD6OAIBCjgCAh7QFAIacBAC4MQIAuTECALo1AgC7zQIAvNUCAL3dAgC+1QIAv8kCALBpAgCxaQIAskECALNBAgC0OQIAtTkCALYRAgC3EQIASo4AgOM0PgBOjgCA4aw+AFKOAIDvfAMAVo4AgFqOAIBejgCA45QDAGKOAIDhfD4AZo4AgO/oPgBqjgCAbo4AgHKOAIB2jgCAo1UDAHqOAICldQMAfo4AgIKOAICmdQMAho4AgIqOAICr5QIAquUCAK3dAgCs3QIAr7UCAK61AgCoGQYAqSEGAKohBgCrPQYArCUGAK1dBgCuVQYAr00GAEaOAICOjgCAko4AgJaOAICajgCAno4AgKKOAICmjgCAuOUGALmBBgC6gQYAu50GALyJBgC9iQYAvqEGAL+hBgCwPQYAsQ0GALIFBgCz7QYAtPUGALXhBgC24QYAt90GALOpBgCCLQAAgRUAAIAdAACqjgCAtt0GALWtBgCujgCAu8kGALr5BgCyjgCAhOADAL8lBgC+MQYAvTkGALzRBgC+iAMAo+0GANaNAIC2jgCAppkGALqOAIC+jgCApekGAKq9BgCrjQYAhkgAAIdsAACudQYAr2EGAKyVBgCtfQYAqIEGAKmNBgCqmQYAq5UGAKyNBgCttQYArrEGAK+tBgDCjgCAxo4AgMqOAIDOjgCA0o4AgNaOAIDajgCA3o4AgLilBgC5YQEAumEBALthAQC8YQEAvWEBAL5hAQC/YQEAsNkGALHZBgCyqQYAs6kGALS9BgC1oQYAtqEGALedBgCzEQYA4o4AgOaOAIDqjgCA7o4AgLY1BgC1BQYA8o4AgLsdBgC6HQYA9o4AgPqOAIC/ZQYAvnkGAL19BgC8fQYA/o4AgKNVBgACjwCABo8AgKZxBgAKjwCADo8AgKVBBgCqWQYAq1kGABKPAIAWjwCArj0GAK8hBgCsOQYArTkGAKjVAgCp3QIAqikDAKspAwCsOQMArTkDAK4pAwCvKQMAGo8AgB6PAIAijwCAKo8AgC6PAIAyjwCAvrgDADaPAIC47QMAuYUDALqBAwC7gQMAvIUDAL2NAwC+sQMAv7EDALBZAwCxWQMAsu0DALPlAwC0/QMAteUDALblAwC31QMAgKEAAIGhAACCoQAAvoAMADqPAICEmAIAPo8AgEKPAICGAAwAh/QDAEaPAIBKjwCATo8AgFKPAIBWjwCAhLADALPhAwBajwCAXo8AgGKPAIBmjwCAtvkDALXxAwBqjwCAu90DALrdAwBujwCAco8AgL9hAwC+eQMAvXEDALx5AwB2jwCAeo8AgH6PAICjLQIAgo8AgKU9AgCmNQIAho8AgIqPAICOjwCAqhECAKsRAgCstQIArb0CAK61AgCvrQIA48QDAOMQBwDhuAEA4WwHAIBxAACBcQAAggUAAJKPAICGwAwAh1QNAJqPAICejwCA77ADAO8ABwCijwCApo8AgKqPAICujwCAso8AgLaPAIC6jwCAvo8AgMKPAIDvpAEAhKANAOGABgDGjwCA4xABAMqPAIDOjwCA0o8AgNaPAICz9QEA2o8AgN6PAIDijwCA5o8AgLZNAQC1SQEA6o8AgLtRAQC6SQEA7o8AgPKPAIC/OQEAvjEBAL1BAQC8SQEAqC0OAKk1DgCqPQ4AqzEOAKyBDgCtjQ4AroUOAK+1DgCWjwCA9o8AgPqPAID+jwCAgBkAAIEZAACCBQAAApAAgLidDgC5rQ4AuqUOALtNDwC8VQ8AvV0PAL5JDwC/QQ8AsM0OALHVDgCy3Q4As9UOALS1DgC1vQ4AtrUOALetDgCjtQ4AvogDAAaQAIAKkACADpAAgKYNDgClCQ4AEpAAgKsRDgCqCQ4AhggAAIdsAwCveQ4ArnEOAK0BDgCsCQ4AFpAAgBqQAIAekACAs7UPACKQAIC1VQ8Atl0PACaPAIAmkACAKpAAgLp5DwC7eQ8AvGkPAL1dDwC+SQ8Av0kPAKhpDgCpaQ4AqnEOAKtxDgCskQ4ArZEOAK6RDgCvkQ4ALpAAgDKQAIA2kACAOpAAgD6QAIBCkACARpAAgEqQAIC4hQ4AuY0OALqFDgC7nQ4AvI0OAL29DgC+tQ4Av3kBALDxDgCx8Q4AsvEOALPFDgC0wQ4AtcEOALbBDgC3wQ4Ao/kOAE6QAIBSkACAVpAAgFqQAICmEQ4ApRkOAF6QAICrNQ4AqjUOAGKQAIBmkACArwUOAK4FDgCtEQ4ArCUOAIANAACBFQAAgh0AAGqQAIBukACAcpAAgISUAQC+lAEAhkAHAIf0AAB6kACAfpAAgIKQAICGkACAipAAgI6QAICojQIAqZUCAKqVAgCrzQIArNUCAK3dAgCuyQIAr/0CAJKQAICWkACAmpAAgJ6QAIC/ABQAopAAgKaQAICqkACAuH0DALnBAwC6wQMAu8EDALzBAwC9yQMAvvEDAL/xAwCwhQIAsUUDALJNAwCzRQMAtF0DALVFAwC2TQMAt0UDALMdAgCukACAspAAgLaQAIC6kACAtl0CALVdAgC+kACAu4EDALpBAgDCkACAxpAAgL+BAwC+mQMAvZEDALyZAwDKkACAo1kCAM6QAIDSkACAphkCANaQAIDakACApRkCAKoFAgCrxQMA3pAAgOKQAICu3QMAr8UDAKzdAwCt1QMA6pAAgOPMAACEBAIA4bwBAIDJAQCB/QEAgvUBAL4QBQDukACAvigEAPKQAID2kACA+pAAgO8QAAD+kACAApEAgIbgBACH9AIABpEAgAqRAIDj/A8ADpEAgOHgDwASkQCA7xQPABaRAIAakQCAHpEAgCKRAIAmkQCAKpEAgC6RAIAykQCANpEAgDqRAIA+kQCAQpEAgEaRAIBKkQCA7+ABAIUEEgDh3A4ATpEAgOMcDgCAKQAAgR0AAIIFAABSkQCAszECAFqRAICEzAUAXpEAgGKRAIC2KQIAtSECAGaRAIC7zQEAus0BAGqRAIBukQCAv3UBAL7JAQC9wQEAvMkBAKjpBQCp6QUAqvkFAKv5BQCs6QUArekFAK45BgCvOQYA5pAAgFaRAICGiAAAhwADAHKRAIB2kQCAepEAgH6RAIC40QYAudkGALrhBgC74QYAvJEGAL2dBgC+lQYAv4kGALBJBgCxSQYAsl0GALNVBgC0TQYAtfEGALbxBgC38QYAo3EFAIKRAICGkQCAipEAgI6RAICmaQUApWEFAJKRAICrjQYAqo0GAJaRAICakQCArzUGAK6JBgCtgQYArIkGAJ6RAICikQCAs+EHAKaRAIC14QcAqpEAgK6RAIC25QcAdpAAgLKRAIC7vQcAuqEHAL2VBwC8qQcAv5UHAL6VBwCoAQYAqSUGAKohBgCrIQYArCEGAK0tBgCuJQYAr1UGALaRAICCHQAAgR0AAIAdAAC6kQCAvpEAgMKRAI
C+MAEAuDkGALk5BgC6yQYAu8kGALzZBgC92QYAvskGAL/JBgCwLQYAsTEGALI1BgCzCQYAtBkGALUZBgC2CQYAtwkGAKOpBgCEjAIAhigfAIdEAQDKkQCApq0GAKWpBgDOkQCAq/UGAKrpBgDSkQCA1pEAgK/dBgCu3QYArd0GAKzhBgDakQCAsxUGAN6RAIDikQCAtj0GAOaRAIDqkQCAtTUGALrZAQC72QEA7pEAgPKRAIC+fQEAv2UBALx9AQC9dQEAqMUFAKnJBQCq2QUAq9EFAKz5BQCt+QUArikCAK8pAgD2kQCA+pEAgP6RAIACkgCAjAAAAAaSAIAKkgCADpIAgLjtAgC5hQIAuo0CALuBAgC8hQIAvY0CAL69AgC/fQMAsFkCALFZAgCy7QIAs+UCALT9AgC15QIAtuUCALfVAgCjUQUAEpIAgBaSAIAakgCAHpIAgKZ5BQClcQUAIpIAgKudAgCqnQIAJpIAgCqSAICvIQIArjkCAK0xAgCsOQIAghEAAC6SAICAZQAAgQkAADKSAIC+mAMAOpIAgD6SAICEJAMAQpIAgIdoAwCGjBwARpIAgEqSAIBOkgCAUpIAgFaSAIBakgCAs6ECAITAHAC10QIAXpIAgGKSAIC21QIAZpIAgGqSAIC7wQIAuvUCAL0RAQC82QIAvxEBAL4ZAQBukgCAcpIAgHaSAIB6kgCAfpIAgIKSAICGkgCA77gGAIqSAIDhnAQAjpIAgON0BgCSkgCAlpIAgJqSAICekgCAgPkAAIH5AACCBQAAopIAgL5YHACEWB8A71wAAO9ABgDhkAEA4fwGAOM8AADjdAYAqpIAgK6SAICGmBwAh/QcAKNpAgC+DB8AspIAgLaSAIC6kgCAph0CAKUZAgC+kgCAqwkCAKo9AgDCkgCAxpIAgK/ZAQCu0QEArdkBAKwRAgCokR0AqZkdAKqhHQCroR0ArNEdAK3dHQCu1R0Ar8kdADaSAICmkgCAypIAgM6SAIDSkgCA1pIAgNqSAIDekgCAuHkeALl5HgC6zR4Au8UeALzdHgC9xR4AvsUeAL/1HgCwuR0AsY0dALKFHQCzTR4AtFUeALVdHgC2VR4At0keALjNHwC51R8Aut0fALvVHwC88R8Avf0fAL7pHwC/6R8AsKUfALGxHwCysR8As40fALSVHwC19R8Atv0fALf1HwCoGR4AqRkeAKotHgCrPR4ArCUeAK0tHgCuJR4Ar90fAOKSAIDmkgCA6pIAgO6SAIDykgCAxpEAgPaSAID6kgCAs+UfAP6SAIACkwCABpMAgAqTAIC27R8Ate0fAA6TAIC7NR4AuiEeABKTAIAWkwCAv3EeAL4RHgC9GR4AvCUeAIJpAACjoR8AgFkAAIFRAACmqR8AGpMAgB6TAIClqR8AqmUeAKtxHgCGAAQAh+wBAK5VHgCvNR4ArGEeAK1dHgCoMR4AqTEeAKpBHgCrQR4ArEEeAK1JHgCucR4Ar3EeACKTAIAmkwCAKpMAgC6TAIAykwCANpMAgDqTAIA+kwCAuCkBALkpAQC6OQEAuzUBALwtAQC90QAAvtEAAL/RAACwyQEAsckBALLZAQCz2QEAtMkBALXJAQC2GQEAtxkBALPJHQBCkwCARpMAgEqTAIBOkwCAtskdALXJHQBSkwCAuw0CALoNAgBWkwCAWpMAgL8NAgC+DQIAvQ0CALwNAgBekwCAo40dAGKTAIBmkwCApo0dAGqTAIBukwCApY0dAKpJAgCrSQIAcpMAgHaTAICuSQIAr0kCAKxJAgCtSQIAgA0AAIERAACCEQAAepMAgO/MAgB+kwCAgpMAgISQAgDjLAIAvigDAOHYAQCKkwCAhhAEAIfUAwCOkwCAkpMAgLNhAwCWkwCAmpMAgJ6TAICikwCAtnkDALVxAwCmkwCAu10DALpdAwCqkwCArpMAgL/hAAC++QAAvfEAALz5AACjoQIAspMAgLaTAIC6kwCAvpMAgKa5AgClsQIAwpMAgKudAgCqnQIAxpMAgMqTAICvIQEArjkBAK0xAQCsOQEAzpMAgNKTAIDvZB8A1pMAgNqTAIDekwCA4pMAgOaTAICADQAAgREAAIIVAADqkwCA4eAcAO6TAIDjiB8A8pMAgISAAgC+jAUAh0gFAIYsBAD6kwCA/pMAgO+kHgDv9B4A4QAeAOFQHwDjLB4A47AeAAKUAIAGlACACpQAgA6UAIASlACAFpQAgISEBACzcQEAGpQAgLUdAQC2FQEAHpQAgCKUAIAmlACAugEBALsBAQC89QAAvf0AAL71AAC/7QAAqK0GAKm9BgCqtQYAq8kGAKzZBgCt2QYArskGAK/BBgAqlACALpQAgDKUAIA2lACAOpQAgD6UAIBClACARpQAgLhtBwC5BQcAug0HALsBBwC8AQcAvQEHAL4BBwC/AQcAsIkGALGJBgCybQcAs2UHALR9BwC1ZQcAtmUHALdVBwCGkwCAozkGAEqUAID2kwCApl0GAE6UAIBSlACApVUGAKpJBgCrSQYAVpQAgFqUAICuvQcAr6UHAKy9BwCttQcAgG0AAIEJAACCGQAAXpQAgGKUAIC+nAMAZpQAgGqUAICGQAAAh2AAAG6UAIBylACAdpQAgHqUAIB+lACAgpQAgKiRBgCpkQYAqrkGAKu5BgCsqQYArakGAK7ZBgCv2QYAhpQAgIqUAICOlACAkpQAgJaUAICalACAnpQAgKKUAIC4cQEAuXEBALpxAQC7cQEAvNkBAL3BAQC+wQEAv/UBALCxBgCxuQYAsokGALOJBgC0UQEAtVEBALZRAQC3UQEAszEGAKaUAICqlACArpQAgLKUAIC2KQYAtSEGALaUAIC7fQYAunUGALqUAIC+lACAv5UBAL6VAQC9XQYAvF0GAMKUAICjdQYAxpQAgMqUAICmbQYAzpQAgNKUAIClZQYAqjEGAKs5BgCErAEAvqABAK7RAQCv0QEArBkGAK0ZBgCo3QIAqe0CAKrlAgCr/QIArOUCAK3tAgCu5QIArz0DANqUAIDelACA4pQAgL5kDADmlACA6pQAgO6UAIDylACAuMkDALnJAwC62QMAu9EDALz5AwC9+QMAvpkDAL+VAwCwRQMAsU0DALJFAwCzXQMAtEUDALVNAwC2RQMAt/kDAIFVAwCASQMAs2UCAIJVAwC1ZQIA9pQAgPqUAIC2ZQIAhgAMAIfkAwC7gQMAuokDAL2BAwC8mQMAv4EDAL6JAwCjLQIA/pQAgAKVAIAGlQCACpUAgKYtAgClLQIADpUAgKvJAwCqwQMAEpUAgBaVAICvyQMArsEDAK3JAwCs0QMA49gGAOGsBwDhnAYA45wGABqVAICEWA0AHpUAgCKVAIAmlQCAKpUAgC6VAIAylQCA7xwBADaVAIA6lQCA70AGAIB5AACBFQAAghEAAIQADAA+lQCA46wAAEKVAIDhpAEASpUAgO9wAACGyAwAh6QNAE6VAIBSlQCAVpUAgFqVAIC6yQUAu8kFALilBQC5zQUAvvkFAL/5BQC8zQUAvcUFALKlBQCzr
QUAsBEGALERBgC2rQUAt50FALS1BQC1rQUAqmEGAKthBgConQYAqZUGAK5hBgCvYQYArHEGAK1xBgBelQCAYpUAgGaVAIBqlQCAbpUAgHKVAIC+sAwAdpUAgKghDgCpIQ4AqiEOAKs9DgCsJQ4ArS0OAK4lDgCviQ4ARpUAgHqVAIB+lQCAgpUAgIaVAICKlQCAjpUAgJKVAIC4UQ8AuV0PALpVDwC7bQ8AvHUPAL19DwC+dQ8Av2kPALD5DgCxoQ4AsqEOALOhDgC0oQ4AtakOALaRDgC3kQ4As6kOAJaVAIDWlACAmpUAgJ6VAIC2rQ4Ata0OAKKVAIC7ZQ4Auj0OAKaVAICqlQCAv20OAL5lDgC9dQ4AvHUOAIIZAACj7Q4AgGUAAIEZAACm6Q4ArpUAgLKVAICl6Q4AqnkOAKshDgC2lQCAupUAgK4hDgCvKQ4ArDEOAK0xDgCoYQ4AqXUOAKp9DgCrdQ4ArG0OAK31DgCu/Q4Ar/UOAIaAAQCHpAEAvpUAgMKVAIDGlQCAypUAgM6VAIDSlQCAuHUBALl9AQC6dQEAu8kBALzdAQC9xQEAvsUBAL/1AQCwjQ4AsZUOALKdDgCzkQ4AtFUBALVdAQC2VQEAt00BALP1DgDWlQCA2pUAgN6VAIDilQCAtnUOALXlDgDmlQCAu1EOALpJDgDqlQCA7pUAgL+ZAQC+kQEAvUUOALxJDgDylQCAo7EOAPaVAID6lQCApjEOAP6VAIAClgCApaEOAKoNDgCrFQ4ABpYAgAqWAICu1QEAr90BAKwNDgCtAQ4AqO0CAKktAwCqJQMAqz0DAKwlAwCtLQMAriUDAK+ZAwAOlgCAEpYAgBaWAIAalgCAHpYAgCKWAIC+dAIAKpYAgLiNAwC5kQMAupEDALulAwC8vQMAvXUAAL59AAC/dQAAsOkDALHpAwCy+QMAs/EDALTZAwC12QMAtrkDALe1AwCArQAAgbUAAIK9AACzoQMALpYAgLWhAwC2oQMAMpYAgITgAgA2lgCAuiEDALshAwC8IQMAvSkDAL4RAwC/EQMAo+0DAIXABACFtG8AOpYAgD6WAICm7QMApe0DAEKWAICrbQMAqm0DAIZIBQCHbAMAr10DAK5dAwCtZQMArG0DAEaWAIDjAA4A71hsAOG0DwBKlgCATpYAgFKWAIBWlgCAoakDAKD9DwCjwQMAog0DAOHgAwDv4A8A4+QDAFqWAIBelgCAYpYAgIQEBAC+BAQAZpYAgO+UAwBqlgCAbpYAgHKWAIDj1AMAdpYAgOFUAAB6lgCAfpYAgIKWAICGlgCAgA0AAIEVAACCHQAAipYAgI6WAICSlgCAj5EbAO+cDgCE4AcA4dQOAJqWAIDj8A4AnpYAgKKWAICGGAcAh5AEAJnlFwCY5RcAm+kLAJo5CwCd/QoAnPELAJ9VDwCeXQ8AkSkfAJDNGwCTJR8Aks0fAJXREwCUKRMAlxkXAJZ1EwCM4RAAjSUQAI4tEACP+QwAJpYAgJaWAICKORQAi5UUAITpGACFBRgAhuUYAIfxFACmlgCAqpYAgIIxHACDFRwAnKkEAK6WAICylgCAtpYAgLqWAIC+lgCAmtEEAJt9BACUTQ0AleUIAJblCACXtQgAwpYAgMaWAICSWQwAk1kMAKGRAADKlgCAowF8AKKZAACluXwApJF8AKeZeACm4X0AqYF5AKiheACriXQAqgF0AK0BcACsWXQAr4VwAK6dcACx4WwAsAFsALMBaACyHWwAtfVoALT1aADOlgCA0pYAgNaWAIDalgCA3pYAgOKWAIDmlgCA6pYAgO6WAIDylgCAqD0HAKmVBwCqlQcAq6kHAKzdBwCtxQcArsUHAK8dBgD2lgCAgh0AAIEdAACAHQAA+pYAgP6WAIAClwCAvmABALgZBgC5GQYAuikGALslBgC8IQYAvSEGAL4hBgC/IQYAsHEGALFxBgCycQYAs3EGALRNBgC1NQYAtj0GALctBgCzHQcACpcAgIYoAACHqAAADpcAgLZFBwC1VQcAEpcAgLu1BgC6tQYAFpcAgBqXAIC/8QYAvokGAL2lBgC8pQYAHpcAgKNZBwAilwCAJpcAgKYBBwAqlwCALpcAgKURBwCq8QYAq/EGADKXAIA2lwCArs0GAK+1BgCs4QYAreEGAKipBQCptQUAqr0FAKs9AgCsJQIArVECAK5RAgCvUQIAOpcAgD6XAIBClwCARpcAgIQ8AwBKlwCATpcAgFKXAIC4pQIAua0CALqlAgC7vQIAvKUCAL2tAgC+pQIAv30DALAxAgCxMQIAshkCALMZAgC09QIAta0CALalAgC3nQIAVpcAgFqXAIBelwCAszkFAGKXAIC1oQIAtt0CAGaXAIBqlwCAbpcAgLr5AgC7+QIAvMECAL3BAgC+PQIAv2UCAHKXAICmgQIApf0CAHqXAICjZQUAvlh8AIbYfACHnHwArzkCAK5hAgCtnQIArJ0CAKulAgCqpQIAfpcAgIKXAICohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAIGFAQCAhQEAhpcAgILtAQCKlwCAjpcAgJKXAICWlwCAuHUBALl9AQC6dQEAu80BALzVAQC93QEAvskBAL/BAQCwtQIAsb0CALKBAgCzgQIAtFEBALVRAQC2UQEAt1EBAJqXAICelwCAopcAgKaXAIDhMAYA4WQHAOMoBgDjxAYAhCB9AKqXAIDvbAAA7xgGAK6XAICylwCAtpcAgLqXAICzXQIAvkh8AL6XAIDClwCAxpcAgLYVAgC1dQIAypcAgLs5AgC6MQIAzpcAgNKXAIC/1QEAvtUBAL0VAgC8FQIAo519AHaXAIDWlwCA2pcAgN6XAICm1X0ApbV9AOKXAICr+X0AqvF9AOaXAIDqlwCArxV+AK4VfgCt1X0ArNV9AIBNAACBVQAAglUAALOxfgDulwCAtWV/ALZtfwDylwCAhkADAIcEAwC66X8Au+l/ALz5fwC9+X8Avt1/AL/NfwD2lwCA+pcAgAaXAID+lwCAApgAgAaYAIAKmACADpgAgKhtfgCpXX4AqlV+AKuFfwCsgX8ArYF/AK6BfwCvgX8AsEF/ALFBfwCyQX8As0F/ALR1fwC1ZX8Atm1/ALdlfwC4XX8AuS1/ALolfwC7PX8AvC1/AL0dfwC+FX8Av/UAAKP9fwASmACAFpgAgBqYAIAemACApiF+AKUpfgAimACAq6V+AKqlfgAmmACAKpgAgK+BfgCukX4ArbV+AKy1fgAumACAMpgAgDaYAIA6mACAPpgAgEKYAIBGmACASpgAgIA9AACBCQAAghkAAE6YAIBSmACAhLgBAL6wAQBWmACAqK0BAKnVAQCq1QEAqw0BAKwVAQCtGQEArgkBAK8JAQCGAAQAhwQBAFqYAIBemACAYpgAgGaYAIBqmACAbpgAgLjtAAC5hQAAuo0AALuFAAC8nQAAvYUAAL6NAAC/hQAAsHkBALF5AQCy7QAAs+UAALT9AAC15QAAtuUAALfVAACzXQIAcpgAgHaYAIB6mACA
fpgAgLaZAgC1nQIAgpgAgLu9AgC6vQIAhpgAgIqYAIC/IQMAvjkDAL0xAwC8OQMAvigDAKMZAgCOmACAkpgAgKbdAgCWmACAmpgAgKXZAgCq+QIAq/kCAJ6YAICimACArn0DAK9lAwCsfQMArXUDAL7IBACmmACAqpgAgL7EBQCumACAspgAgLaYAIC6mACAgD0AAIEJAACCGQAAvpgAgMKYAICEOAMAypgAgM6YAIDveAIA0pgAgIZIBACHVAMA1pgAgNqYAIDemACA4pgAgOaYAIDqmACA7pgAgPKYAIDjVAIA9pgAgOFAAQD6mACA/pgAgOMkfwACmQCA4Zx8AAaZAIAKmQCADpkAgBKZAICEbAUAFpkAgBqZAIAemQCAIpkAgO8YfwAmmQCAKpkAgLPxAgAumQCAMpkAgDqZAIA+mQCAtukCALXhAgBCmQCAu3EBALppAQCHoAUAhswEAL85AQC+WQEAvVEBALxhAQDhQH8ARpkAgOM4fgCEwAQAgtkAAO8UAACApQAAgdkAAEqZAIDjwAAATpkAgOHUAQBSmQCAVpkAgO+EfgBamQCAqs0BAKvVAQBemQCAYpkAgK79AQCvnQEArMUBAK31AQBmmQCAo1UCAGqZAIBumQCApk0CAHKZAIB2mQCApUUCAMaYAIA2mQCAepkAgH6ZAICCmQCAhpkAgIqZAICOmQCAqJkGAKmZBgCq7QYAq/0GAKzlBgCt7QYAruUGAK/dBgCwpQYAsa0GALKlBgCzuQYAtK0GALVVBwC2UQcAt00HALh1BwC5fQcAunUHALtJBwC8WQcAvVkHAL5JBwC/RQcAs0UGAJKZAICWmQCAmpkAgJ6ZAIC2TQYAtU0GAKKZAIC7SQYAukEGAIYIAACHjAAAv7EHAL5JBgC9TQYAvFEGAIJdAACjAQYAgEUAAIFdAACmCQYAqpkAgK6ZAIClCQYAqgUGAKsNBgCymQCAtpkAgK4NBgCv9QcArBUGAK0JBgCoTQYAqVUGAKpVBgCriQYArLEGAK29BgCuqQYAr6kGAKaZAIC6mQCAvpkAgMKZAIDGmQCAypkAgM6ZAIDSmQCAuEkBALlJAQC6WQEAu1kBALxJAQC9SQEAvt0BAL/VAQCw3QYAsa0GALKlBgCzjQYAtJkGALWZBgC2jQYAt4UGALPdBgDWmQCA2pkAgN6ZAIDimQCAtj0GALU5BgDmmQCAu2kGALoZBgDqmQCA7pkAgL9dBgC+XQYAvVkGALxxBgDymQCAo5kGAPaZAID6mQCApnkGAP6ZAIACmgCApX0GAKpdBgCrLQYABpoAgAqaAICuGQYArxkGAKw1BgCtHQYAqNUCAKndAgCq4QIAq+ECAKw1AwCtPQMArjUDAK8tAwCAzQMAgQkAAIIZAAAOmgCAEpoAgIQYAgC+dAMAGpoAgLjpAwC56QMAuokDALuFAwC8nQMAvYEDAL6BAwC/tQMAsFUDALFdAwCyVQMAs+kDALT5AwC1+QMAtukDALfhAwCGIAwAhxADAB6aAIAimgCAJpoAgCqaAIAumgCA71wCADKaAIDhFAAANpoAgOOIAgC++AwAOpoAgD6aAIBCmgCAu/kDALrxAwC+gA0ARpoAgL9dAwC+XQMAvV0DALzhAwCzCQIASpoAgE6aAIBSmgCAVpoAgLbdAwC13QMAWpoAgKipBgCpqQYAqrkGAKu5BgCsqQYArakGAK4dBQCvFQUAXpoAgGKaAIBmmgCAapoAgG6aAIBymgCAdpoAgHqaAIC4GQUAuS0FALolBQC7yQUAvNkFAL3FBQC+zQUAv8UFALBtBQCxdQUAsnUFALNFBQC0XQUAtT0FALY1BQC3KQUA4fQGAOFUBwDjFAYA47wGAIEJAACAqQAAfpoAgII5AACE7A0AgpoAgIeIDACGDAwAipoAgI6aAIDvzAcA78QHAKMpAwCSmgCAlpoAgJqaAICemgCApv0CAKX9AgCimgCAq9kCAKrRAgCmmgCAqpoAgK99AgCufQIArX0CAKzBAgCoPQ4AqY0OAKqFDgCrnQ4ArIUOAK2NDgCuuQ4Ar7UOAIaaAICumgCAspoAgLaaAIC6mgCAvpoAgMKaAIDGmgCAuL0OALllDwC6bQ8Au2UPALx9DwC9ZQ8Avm0PAL9lDwCw1Q4Asd0OALLVDgCzoQ4AtJUOALWdDgC2lQ4At40OALMNDgDKmgCAzpoAgNKaAIDWmgCAtg0OALUNDgDamgCAuxkOALoRDgDemgCAFpoAgL9ZDgC+UQ4AvXUOALwBDgDimgCAo0kOAOaaAIDqmgCApkkOAO6aAIDymgCApUkOAKpVDgCrXQ4AhKQDAPaaAICuFQ4Arx0OAKxFDgCtMQ4AqLEOAKmxDgCqzQ4Aq8UOAKzdDgCtxQ4ArsUOAK/1DgCA7QEAgfEBAILxAQD6mgCAhpABAIe0AQD+mgCAApsAgLjFAQC5zQEAusUBALvdAQC8zQEAvf0BAL6ZAQC/lQEAsI0OALFBAQCyQQEAs0EBALRBAQC1QQEAtkEBALdBAQCzRQ4ABpsAgAqbAIAOmwCAEpsAgLZFDgC1VQ4AFpsAgLuFAQC6SQ4AGpsAgB6bAIC/hQEAvoUBAL2VAQC8lQEAIpsAgKMBDgAmmwCAKpsAgKYBDgAumwCAMpsAgKURDgCqDQ4Aq8EBADabAIA6mwCArsEBAK/BAQCs0QEArdEBAKgtAwCpPQMAqjUDAKuJAwCsmQMArZkDAK6JAwCvgQMAPpsAgEKbAIBGmwCASpsAgE6bAIBSmwCAVpsAgFqbAIC4rQMAuWUAALptAAC7ZQAAvH0AAL1lAAC+bQAAv2UAALDJAwCxyQMAsqkDALOlAwC0vQMAtaEDALahAwC3lQMAgL0AAIEJAACCGQAAXpsAgGKbAIC+2AMAapsAgG6bAICErAIAcpsAgIfoAwCGDAQAdpsAgHqbAIB+mwCAgpsAgLP9AwCGmwCAipsAgI6bAICSmwCAtlkDALVRAwCWmwCAu00DALpNAwCamwCAnpsAgL8lAwC+OQMAvTEDALw9AwCimwCAppsAgKqbAICumwCA71gPALKbAIC2mwCAupsAgOOQDgC+mwCA4bAPAMKbAIDGmwCAypsAgM6bAIDSmwCAgHUAAIF9AACCdQAAhBgFAO88AwDamwCAvhQFAN6bAIDj0AMA4psAgOFAAADmmwCAhtAEAIdYBQDqmwCA7psAgPKbAID2mwCA+psAgP6bAIACnACABpwAgAqcAIDvrA8AhOwEAOEQDgAOnACA41QBABKcAIAWnACAGpwAgB6cAICj/QIAIpwAgCacAIAqnACALpwAgKZZAgClUQIAMpwAgKtNAgCqTQIANpwAgDqcAICvJQIArjkCAK0xAgCsPQIAqJkGAKmZBgCqrQYAq70GAKylBgCtrQYArqUGAK/ZBgDWmwCAghEAAIEZAACAwQcAPpwAgEKcAIC+cAMARpwAgLhJBwC5SQcAul0HALtVBwC8TQcAvXEHAL51BwC/bQcAsKkGALGpBgCyuQYAs7EGALSZBgC1mQYAtnk
HALd5BwC1NQYASpwAgE6cAIC2NQYAhjAAAIdcAwCzPQYAUpwAgL19BgC8dQYAv0UGAL5FBgBmmwCAVpwAgLt1BgC6dQYAo2UGAFqcAIBenACAYpwAgGacAICmbQYApW0GAGqcAICrLQYAqi0GAG6cAIBynACArx0GAK4dBgCtJQYArC0GAKhVBgCpWQYAqm0GAKthBgCsaQYArWkGAK6ZBgCvmQYAdpwAgHqcAIB+nACAgpwAgIacAICKnACAjpwAgJKcAIC4+QYAufkGALqNBgC7hQYAvJ0GAL2FBgC+hQYAv7UGALDpBgCx6QYAsvkGALP5BgC06QYAtd0GALbJBgC3yQYAs+UGAJacAICanACAnpwAgKKcAIC26QYAteEGAKacAIC7LQYAui0GAKqcAICunACAvxkGAL4tBgC9LQYAvC0GAIIVAACjoQYAgGEAAIFhAACmrQYAspwAgL6QAQClpQYAqmkGAKtpBgCEpAEAupwAgK5pBgCvXQYArGkGAK1pBgCohQIAqY0CAKqVAgCruQIArNUCAK3dAgCu1QIAr80CAIaAHACHZAMAvpwAgL5gAwDCnACAxpwAgMqcAIDOnACAuHUDALl9AwC6dQMAu8kDALzZAwC92QMAvskDAL/BAwCwvQIAsY0CALKFAgCzTQMAtFUDALVdAwC2VQMAt00DALMdAgDSnACAhAgDANacAIDanACAtl0CALVdAgDenACAu0kCALp5AgDinACA5pwAgL+ZAwC+kQMAvZkDALxRAgCwAAAAo1kCAOqcAIDunACAphkCAPKcAID2nACApRkCAKo9AgCrDQIA+pwAgP6cAICu1QMAr90DAKwVAgCt3QMAAp0AgAadAIAKnQCA76wGAA6dAIASnQCAFp0AgBqdAIC+6BwAHp0AgCKdAIAqnQCALp0AgOGABwAynQCA42AGAIBdAACBYQAAgmEAALN9AQA2nQCAtW0BALZlAQA6nQCAhiAdAIdYHQC6+QEAu/EBALzZAQC92QEAvrEBAL+xAQDvoAAAPp0AgEKdAIBGnQCASp0AgE6dAIBSnQCA71wBAIRsHADhzAYAVp0AgOMcBgDjSAAAWp0AgOEwAQBenQCAo/EBAGKdAICFABQAZp0AgGqdAICm6QEApeEBAG6dAICrfQEAqnUBAHKdAIB2nQCArz0BAK49AQCtVQEArFUBAKjtHQCpLR4AqjkeAKs5HgCsKR4ArSkeAK6dHgCvkR4AJp0AgHqdAIB+nQCAgp0AgIadAICC+QAAgfEAAID9AAC4qR4AuakeALpJHwC7SR8AvFkfAL1FHwC+TR8Av0UfALDxHgCx+R4AssEeALPBHgC0uR4AtbkeALatHgC3pR4AsBEfALERHwCyER8AsyUfALQlHwC1KR8Atl0fALdRHwC4cR8AuXkfALpBHwC7QR8AvJUAAL2dAAC+lQAAv40AAIqdAIC2nACAjp0AgJKdAICWnQCAmp0AgIb4AwCH0AAAqM0fAKnVHwCq0R8Aq70fAKytHwCtcR8ArnEfAK9xHwCzOR4Anp0AgKKdAICmnQCAqp0AgLaRHgC1RR4Arp0AgLu1HgC6tR4Asp0AgLadAIC/jR4AvoEeAL2RHgC8pR4Aup0AgKN9HgC+nQCAwp0AgKbVHgDGnQCAyp0AgKUBHgCq8R4Aq/EeAM6dAIDSnQCArsUeAK/JHgCs4R4ArdUeAKhVAQCpgQAAqoEAAKuBAACsgQAArYkAAK6xAACvsQAA1p0AgNqdAIDenQCA4p0AgOadAIDqnQCA7p0AgPKdAIC4ZQAAuW0AALplAAC7fQAAvGUAAL1tAAC+ZQAAv90DALChAACxrQAAsqUAALO5AAC0qQAAtZ0AALaVAAC3XQAA9p0AgIIdAACBHQAAgB0AAPqdAID+nQCAAp4AgL4UAgAKngCAhKgCAA6eAIASngCAFp4AgBqeAIAengCAjwAAALNJAwAingCAhugEAIesAgAmngCAtkkDALVJAwAqngCAuykDALolAwAungCAMp4AgL8ZAwC+LQMAvS0DALwxAwA2ngCAo40DADqeAIA+ngCApo0DAEKeAIBGngCApY0DAKrhAwCr7QMASp4AgE6eAICu6QMAr90DAKz1AwCt6QMAvoQDAFKeAIBWngCAWp4AgF6eAIBingCAZp4AgGqeAICAPQAAgQkAAIIZAABungCAcp4AgHqeAICENAMAfp4AgLMtAQCCngCAh8wCAIZMBQCGngCAti0BALUtAQCKngCAu0kBALp5AQCOngCAkp4AgL+9AQC+vQEAvbkBALxRAQDheB8Alp4AgOPQHwCangCAnp4AgOGUAQCingCA42gDAKaeAICqngCArp4AgO+IAwCyngCAtp4AgO+sHwC6ngCAvp4AgMKeAIDGngCAyp4AgM6eAIDSngCA1p4AgO9EHgDangCA4dweAN6eAIDjHB4A4p4AgOqeAIDungCA8p4AgIFpAACAZQAAo+UBAIJ9AACl5QEA9p4AgIQUBACm5QEAvigEAPqeAICrgQEAqrEBAK1xAQCsmQEAr3UBAK51AQCoIQYAqS0GAKolBgCrPQYArCUGAK0tBgCuXQYAr00GAHaeAIDmngCAhggDAIeMAwD+ngCAAp8AgAafAIAKnwCAuOkGALnpBgC6jQYAu4UGALydBgC9hQYAvo0GAL+FBgCwPQYAsQ0GALIFBgCz7QYAtPkGALX5BgC27QYAt+UGALDNBwCx1QcAstEHALPtBwC09QcAtf0HALbpBwC36QcAuN0HALklBwC6LQcAuyUHALw9BwC9JQcAvi0HAL8lBwAOnwCAEp8AgAaeAIAWnwCAGp8AgB6fAIAinwCAJp8AgKgVBgCpGQYAqu0HAKv9BwCs7QcArd0HAK7VBwCvuQcAswUGACqfAIAunwCAMp8AgDafAIC2PQYAtQUGADqfAIC7cQYAumkGAD6fAIBCnwCAv1kGAL5RBgC9WQYAvGUGAEafAICjQQYASp8AgE6fAICmeQYAUp8AgIS0AQClQQYAqi0GAKs1BgC+gAEAWp8AgK4VBgCvHQYArCEGAK0dBgCoNQYAqT0GAKo1BgCrWQYArHUGAK2lAQCurQEAr6UBAIDpAACB6QAAgv0AAL8kAQCGMA8Ah+QAAF6fAIBinwCAuMUAALnNAAC6xQAAu90AALzNAAC9/QAAvvUAAL+dAACw3QEAsSUBALItAQCzIQEAtCEBALUhAQC2IQEAtyEBALvBAgC6OQIAZp8AgGqfAIC/xQIAvsUCAL3VAgC82QIAs50FAG6fAIBynwCAdp8AgIwAAAC2BQIAtd0FAHqfAICqfQIAq4UCAH6fAICCnwCAroECAK+BAgCsnQIArZECAIafAICj2QUAip8AgI6fAICmQQIAkp8AgJafAIClmQUAgpFqAIORagCanwCAnp8AgIa5FgCH6RcAhBEWAIWZFgCKoRIAi6ESAKKfAICmnwCAjpEeAI9ZHgCMmRMAjREeAJJxGgCT5RoAqp8AgO/oJACW8QYAlwUGAJTlGgCVGQYAmikCAJ
vFAgCunwCAsp8AgLafAIDhKBsAnN0CAOMgDwCfIQcAnsEHAJ01GwCcLRsAm6EbAJr5HwCZOR8AmLEfAJcBEgCWIRMAlSkTAJRRFgCTGRcAkjEXAJGxFwCQKWsAj1FrAOOsBwCEBA0A4RwHAIANAACBNQAAgj0AALqfAIC+nwCAwp8AgL4gDQDKnwCAzp8AgO9MBwCGWAwAh2ANANKfAIDWnwCA2p8AgN6fAICEXA8A4p8AgO8IAADvhAYA4ZABAOGwBgDj4AAA42QGAOafAIDqnwCA7p8AgPKfAID2nwCA+p8AgL4ADwCEQA4A/p8AgAKgAIAGoACACqAAgA6gAIASoACAFqAAgBqgAICj1QMAotUDAKExAwCgLQcAVp8AgMafAIAeoACAIqAAgCagAICCmQAAgZEAAICZAACoTQ0AqZ0NAKqVDQCrJQ4ArD0OAK0RDgCuEQ4ArxEOALB9DgCxDQ4AsgUOALMtDgC0OQ4AtTkOALYtDgC3JQ4AuOkOALnpDgC6wQ4Au8EOALy5DgC9nQ4AvpUOAL+NDgCzPQ0AKqAAgC6gAIAyoACANqAAgLaxDgC1lQ4AOqAAgLvpDgC6mQ4AhogAAIfkAAC/3Q4Avt0OAL3ZDgC88Q4APqAAgKN5DQC+hAEAhIAGAKb1DgBCoACARqAAgKXRDgCq3Q4Aq60OAEqgAIBOoACArpkOAK+ZDgCstQ4ArZ0OALIFNQCzGTQAsG0wALENNQBSoACAVqAAgLQBKAC1PSkAWqAAgF6gAIBioACAZqAAgGqgAIBuoACAcqAAgHagAICiRQEAo9UBAHqgAIChTQEAps0FAKcBOACkAQQApX0FAKoBPACrRT0AqEk5AKnlOQCudTEAr30xAKxdPQCtATAAqO0OAKn1DgCqCQ4AqwkOAKwZDgCtGQ4Arg0OAK8tDgB+oACAgqAAgIagAICKoACAjqAAgJKgAICWoACAmqAAgLgdDgC5JQ4Aui0OALslDgC8PQ4Avd0BAL7VAQC/zQEAsFUOALFdDgCyVQ4Asy0OALQ1DgC1JQ4Ati0OALclDgCzgQ0AnqAAgKKgAICqoACArqAAgLaZDQC1kQ0AvlQEALuZDQC6kQ0AhogEAIe8AwC/4Q0AvvENAL35DQC8gQ0AgkkAAKPFDQCA9QMAgUkAAKbdDQCyoACAtqAAgKXVDQCq1Q0Aq90NALqgAIC+oACArrUNAK+lDQCsxQ0Arb0NAKgdAgCpRQIAql0CAKtVAgCseQIArXkCAK6JAwCviQMAwqAAgMagAIDKoACAzqAAgIT8BQDSoACA1qAAgNqgAIC4iQMAuWUDALptAwC7ZQMAvH0DAL1lAwC+bQMAv2UDALDBAwCxwQMAssEDALPBAwC0wQMAtcEDALbBAwC3wQMA3qAAgOKgAIDmoACA6qAAgO6gAIDhpAEA8qAAgOPADgC+aAQA9qAAgPqgAIDvHAEA/qAAgAKhAIAGoQCACqEAgLOVAwAOoQCAEqEAgBqhAIAeoQCAtrkDALWxAwAioQCAu0UCALpFAgCGqAQAh6QFAL9FAgC+RQIAvVUCALxVAgDh4A4A4SwMAOMIDgDj1A4AgK0AAIHRAACC0QAAJqEAgCqhAIAuoQCAMqEAgDahAIA6oQCAPqEAgO+IDgDvLA4AoxUDAEKhAICFxCsARqEAgEqhAICmOQMApTEDAE6hAICrxQIAqsUCAFKhAIBWoQCAr8UCAK7FAgCt1QIArNUCAKgNBgCpFQYAql0GAKtVBgCseQYArXkGAK65BgCvuQYAFqEAgFqhAIBeoQCAYqEAgGahAIBqoQCAbqEAgHKhAIC4TQcAuVUHALpRBwC7aQcAvHkHAL1lBwC+bQcAv2UHALDJBgCxyQYAst0GALPVBgC0zQYAtXUHALZ9BwC3dQcAs9UGAHahAIB6oQCAfqEAgIKhAIC2+QYAtfEGAIahAIC7DQYAug0GAIYIAACHLAAAv7EHAL4JBgC9AQYAvAkGAIJRAACjkQYAgEEAAIFBAACmvQYAiqEAgI6hAICltQYAqkkGAKtJBgCSoQCAlqEAgK5NBgCv9QcArE0GAK1FBgCwsQYAsbEGALLNBgCzwQYAtMEGALXJBgC28QYAt/EGALgFAQC5DQEAugUBALsdAQC8BQEAvQ0BAL4FAQC/uQEAmqEAgJ6hAICioQCApqEAgKqhAICuoQCApqAAgLKhAICoLQYAqTUGAKo1BgCr8QYArNEGAK3RBgCu0QYAr9EGALPdBgC2oQCAuqEAgL6hAIDCoQCAtjEGALU5BgDGoQCAuxUGALoVBgDKoQCAzqEAgL9tBgC+ZQYAvXUGALx5BgDSoQCAo5kGANahAIDaoQCApnUGAN6hAIDioQCApX0GAKpRBgCrUQYA5qEAgOqhAICuIQYArykGAKw9BgCtMQYAqNUCAKndAgCq4QIAq+ECAKxRAwCtUQMArlEDAK9RAwDuoQCA8qEAgL7sAwD6oQCA/qEAgAKiAIAGogCACqIAgLjpAwC56QMAuokDALuFAwC8nQMAvYEDAL6BAwC/tQMAsDEDALExAwCyNQMAs+kDALT5AwC1+QMAtukDALfhAwCAbQMAgaUAAIKtAACzZQIADqIAgLXVAwC23QMAEqIAgITgAgAWogCAuvkDALv5AwC87QMAvTEDAL4xAwC/MQMAh+wDAIZkPACyAAAAGqIAgB6iAIDjCAQAIqIAgOHsBgAmogCA7wAGACqiAIAuogCAMqIAgDaiAIA6ogCAPqIAgEKiAIBGogCASqIAgE6iAIDjoAMAUqIAgOGoAQBWogCA7/ADAIIdAACBHQAAgB0AAFqiAIBeogCAYqIAgGqiAIC+TD0AbqIAgKOhAwC+QDwApRECAHKiAIB2ogCAphkCAIRsAgB6ogCAqz0CAKo9AgCt9QIArCkCAK/1AgCu9QIAhkA8AIe0PQB+ogCAgqIAgIaiAICKogCAjqIAgO9EBgCSogCA4dQGAJaiAIDjDAcAmqIAgJ6iAICiogCApqIAgLP1AQCqogCArqIAgLKiAIC2ogCAtkUBALXlAQC6ogCAuzEBALopAQC+ogCAwqIAgL8dAQC+HQEAvRkBALwlAQCoLT4AqTU+AKo9PgCrNT4ArC0+AK2FPgCuhT4Ar7k+AGaiAIDGogCAyqIAgM6iAICAGQAAgRkAAIIFAADSogCAuLk+ALm5PgC6ST8Au0k/ALxZPwC9WT8Avk0/AL9BPwCwrT4AsbU+ALKxPgCzjT4AtJk+ALWZPgC2iT4At4k+AKO1PgCEjAIA1qIAgNqiAIDeogCApgU+AKWlPgDiogCAq3E+AKppPgCGCAAAh2gDAK9dPgCuXT4ArVk+AKxlPgDmogCAs5E/AOqiAIDuogCAtlk/APKiAID2ogCAtbk/ALp1PwC7fT8A+qIAgP6iAIC+QT8Av0E/ALxZPwC9VT8AsJU+ALGdPgCyqT4As6U+ALShPgC1oT4AtqE+ALehPgC45T4Aue0+ALrlPgC7/T4AvO0+AL3dPgC+1T4AvxkBAAKjAIAGowCACqMAgA6jA
IASowCA9qEAgBajAIAaowCAqF0+AKkhPgCqPT4AqzU+AKwVPgCt/T4ArvU+AK/tPgCj1T4AHqMAgCKjAIAmowCAKqMAgKYdPgCl/T4ALqMAgKs5PgCqMT4AMqMAgDajAICvBT4ArgU+AK0RPgCsHT4AgREAAIANAAA6owCAghkAAD6jAIBCowCAhJQBAL4QAACGQAcAhwABAEqjAIBOowCAUqMAgFajAIBaowCAXqMAgKiNAgCplQIAqpUCAKvNAgCs2QIArdkCAK7NAgCvxQIAYqMAgGajAIBqowCAbqMAgIwAAAByowCAdqMAgHqjAIC4HQMAucEDALrBAwC7wQMAvMEDAL3JAwC+8QMAv/EDALCJAgCxiQIAsikDALMpAwC0OQMAtTkDALYpAwC3JQMAsx0CAH6jAICCowCAhqMAgIqjAIC2WQIAtVECAI6jAIC7TQIAuk0CAJKjAICWowCAv/0DAL79AwC9/QMAvP0DAJqjAICeowCAoqMAgKajAIDhDD4AqqMAgOOoPwCuowCAgT0AAIAxAADvUD8Agh0AALKjAIC++AQAhhgFAIdMAwCEDAIA48wAALqjAIDhvAEAvqMAgMKjAIDGowCAyqMAgM6jAICELAUA0qMAgNajAIDaowCA7xAAAN6jAIDiowCAo90DAOajAIDqowCA7qMAgPKjAICmmQMApZEDAPajAICrjQMAqo0DAPqjAID+owCArz0CAK49AgCtPQIArD0CAAKkAIAGpACACqQAgA6kAIASpACAFqQAgBqkAIDvKD4AHqQAgOE8PgAipACA4zgBAIApAACBFQAAghEAACqkAICzMQIAvsgEAITABAAupACAMqQAgLYpAgC1IQIANqQAgLvNAQC6zQEAOqQAgD6kAIC/dQEAvskBAL3BAQC8yQEAqOkFAKnpBQCq+QUAq/kFAKzpBQCt6QUArjkGAK85BgC2owCAJqQAgIaIAACHQAMAQqQAgEakAIBKpACATqQAgLjRBgC52QYAuuEGALvhBgC8kQYAvZEGAL6RBgC/kQYAsEkGALFJBgCyXQYAs1UGALRNBgC18QYAtvEGALfxBgCjcQUAUqQAgFakAIBapACAXqQAgKZpBQClYQUAYqQAgKuNBgCqjQYAZqQAgGqkAICvNQYArokGAK2BBgCsiQYAbqQAgLPRBwBypACAdqQAgLbxBwB6pACAfqQAgLXBBwC60QcAu90HAIKkAICGpACAvrkHAL+5BwC8xQcAvbkHALhpBgC5aQYAuokGALuJBgC8mQYAvZkGAL6JBgC/iQYAsBEGALEdBgCyFQYAs2kGALR5BgC1eQYAtmkGALdhBgCoSQYAqVUGAKpdBgCrVQYArE0GAK11BgCucQYAr3EGAEajAICCHQAAgR0AAIAdAACKpACAjqQAgJKkAIC+cAEAo5UGAJqkAICGKAAAh0gBAJ6kAICmtQYApYUGAKKkAICrmQYAqpUGAKakAICqpACAr/0GAK79BgCt/QYArIEGAK6kAICzFQYAsqQAgLakAIC2PQYAuqQAgL6kAIC1NQYAutkBALvZAQDCpACAxqQAgL59AQC/ZQEAvH0BAL11AQCovQUAqckFAKrZBQCr0QUArPkFAK35BQCuKQIArykCAMqkAIDOpACA0qQAgNakAICMAAAA2qQAgN6kAIDipACAuO0CALmFAgC6gQIAu4ECALyFAgC9jQIAvrECAL+xAgCwWQIAsVkCALLtAgCz5QIAtP0CALXlAgC25QIAt9UCAKNRBQDmpACA6qQAgO6kAIDypACApnkFAKVxBQD2pACAq50CAKqdAgD6pACA/qQAgK8hAgCuOQIArTECAKw5AgCBbQAAgG0AAAKlAICCBQAAvlwMAAqlAIAOpQCA79AGAITsAwDhHAUAEqUAgOP8BwAWpQCAGqUAgIbYDACHvAwAqIUCAKmVAgCqlQIAq6UCAKy9AgCt1QIArtECAK/RAgAepQCAIqUAgCalAIAqpQCALqUAgDKlAIA2pQCAOqUAgLh1AQC5fQEAunUBALvJAQC82QEAvdkBAL7JAQC/wQEAsLUCALG9AgCygQIAs4ECALRRAQC1UQEAtlEBALdRAQA+pQCAhAQNAEKlAIBGpQCAvhwMAEqlAIDvHAAA76AGAOGQAQDhRAcA43AGAOOYBgBOpQCAUqUAgFalAIBapQCAs10CAF6lAIBipQCAZqUAgGqlAIC2FQIAtXUCAG6lAIC7OQIAujECAHKlAIB6pQCAv9UBAL7VAQC9FQIAvBUCAKOdDQAGpQCAdqUAgH6lAICCpQCAptUNAKW1DQCGpQCAq/kNAKrxDQCGCAMAh2ADAK8VDgCuFQ4ArdUNAKzVDQCAkQ8AgZkPAIKhDwCzpQ4AiqUAgLWhDgC2eQ8AjqUAgJKlAICWpQCAukUPALtdDwC8RQ8AvU0PAL5FDwC }, { "type": "Strip", "strip_left": false, "strip_right": true }, { "type": "Replace", "pattern": { "Regex": " {2,}" }, "content": "▁" } ] }, "pre_tokenizer": { "type": "Metaspace", "replacement": "▁", "add_prefix_space": true, "prepend_scheme": "first" }, "post_processor": { "type": "TemplateProcessing", "single": [ { "Sequence": { "id": "A", "type_id": 0 } }, { "SpecialToken": { "id": "</s>", "type_id": 0 } } ], "pair": [ { "Sequence": { "id": "A", "type_id": 0 } }, { "SpecialToken": { "id": "</s>", "type_id": 0 } }, { "Sequence": { "id": "B", "type_id": 0 } }, { "SpecialToken": { "id": "</s>", "type_id": 0 } } ], "special_tokens": { "</s>": { "id": "</s>", "ids": [ 1 ], "tokens": [ "</s>" ] } } }, "decoder": { "type": "Metaspace", "replacement": "▁", "add_prefix_space": true, "prepend_scheme": "always" }, "model": { "type": "Unigram", "unk_id": 2, "vocab": [ [ "<pad>", 0.0 ], [ "</s>", 0.0 ], [ "<unk>", 0.0 ], [ "▁", -2.0122928619384766 ], [ "X", -2.486478805541992 ], [ ".", -3.5449328422546387 ], [ ",", -3.649247407913208 ], [ "s", -3.9033992290496826 ], [ "▁the", 
-3.9598512649536133 ], [ "a", -4.097104549407959 ], [ ":", -4.414328098297119 ], [ "▁and", -4.420670986175537 ], [ "▁to", -4.4523234367370605 ], [ "▁of", -4.572070121765137 ], [ "▁fill", -4.575019836425781 ], [ "e", -4.674920082092285 ], [ "▁in", -4.812063694000244 ], [ "t", -5.063905715942383 ], [ "-", -5.129043102264404 ], [ "▁is", -5.283425331115723 ], [ "▁de", -5.344141960144043 ], [ "▁for", -5.3930158615112305 ], [ "’", -5.4228339195251465 ], [ "i", -5.469857692718506 ], [ "▁that", -5.576240539550781 ], [ "▁you", -5.596375465393066 ], [ "d", -5.6047282218933105 ], [ "▁I", -5.6640448570251465 ], [ "▁with", -5.703730583190918 ], [ "n", -5.737886905670166 ], [ "▁on", -5.784142971038818 ], [ "'", -5.828996181488037 ], [ "o", -5.925558090209961 ], [ "▁are", -5.931313991546631 ], [ "▁it", -5.939518928527832 ], [ "en", -5.9465556144714355 ], [ "▁be", -5.9556708335876465 ], [ "▁The", -5.990020751953125 ], [ "▁as", -6.057407379150391 ], [ "▁your", -6.132311820983887 ], [ "l", -6.139498710632324 ], [ "▁(", -6.184796333312988 ], [ "▁or", -6.241950035095215 ], [ "▁have", -6.27459192276001 ], [ "▁at", -6.327472686767578 ], [ "▁from", -6.349645137786865 ], [ "▁an", -6.350090980529785 ], [ "▁was", -6.350385665893555 ], [ "▁this", -6.352563381195068 ], [ "er", -6.3604278564453125 ], [ "▁la", -6.3624043464660645 ], [ "m", -6.375206470489502 ], [ "r", -6.376530170440674 ], [ "ing", -6.3778581619262695 ], [ "▁can", -6.387146472930908 ], [ "!", -6.421379566192627 ], [ "▁will", -6.423982620239258 ], [ "▁by", -6.44155216217041 ], [ "?", -6.585887432098389 ], [ "▁not", -6.5959086418151855 ], [ "re", -6.620072364807129 ], [ ")", -6.63656759262085 ], [ "▁we", -6.643022060394287 ], [ "y", -6.654535293579102 ], [ "▁und", -6.741473197937012 ], [ "▁has", -6.7602033615112305 ], [ "▁all", -6.768176555633545 ], [ "▁die", -6.8641204833984375 ], [ "▁but", -6.906830310821533 ], [ "▁our", -6.909878730773926 ], [ "▁their", -6.91325044631958 ], [ "▁A", -6.915814399719238 ], [ "▁more", -6.918668746948242 ], [ "▁un", -6.924930572509766 ], [ "▁der", -6.925402641296387 ], [ "c", -6.925714015960693 ], [ "u", -6.932939052581787 ], [ "in", -6.934063911437988 ], [ "▁so", -6.947050094604492 ], [ "▁they", -6.989297866821289 ], [ "▁one", -7.012735843658447 ], [ "▁about", -7.071486473083496 ], [ "▁my", -7.072140693664551 ], [ "ul", -7.076492786407471 ], [ "▁which", -7.097039222717285 ], [ "à", -7.099997520446777 ], [ "▁In", -7.100254535675049 ], [ "/", -7.100865840911865 ], [ "he", -7.104752540588379 ], [ "f", -7.110044002532959 ], [ "▁le", -7.112937927246094 ], [ "▁out", -7.128556728363037 ], [ "▁also", -7.133583068847656 ], [ "▁des", -7.156766414642334 ], [ "▁It", -7.162121295928955 ], [ "▁up", -7.1723432540893555 ], [ "▁\"", -7.172809600830078 ], [ "▁time", -7.178046703338623 ], [ "ă", -7.183253765106201 ], [ "if", -7.185171127319336 ], [ "▁This", -7.191652297973633 ], [ "▁We", -7.223267078399658 ], [ "p", -7.224130153656006 ], [ "▁do", -7.228212356567383 ], [ "–", -7.235409736633301 ], [ "▁“", -7.238142013549805 ], [ "on", -7.240827560424805 ], [ "h", -7.2543206214904785 ], [ "▁si", -7.276725769042969 ], [ "le", -7.2994256019592285 ], [ "▁les", -7.312957286834717 ], [ "▁în", -7.314571857452393 ], [ "▁his", -7.324767112731934 ], [ "▁who", -7.35105562210083 ], [ "▁like", -7.371364116668701 ], [ "b", -7.375369071960449 ], [ "▁when", -7.380199432373047 ], [ ";", -7.380846977233887 ], [ "▁been", -7.38668966293335 ], [ "▁other", -7.388518333435059 ], [ "ly", -7.394660949707031 ], [ "\"", -7.407205104827881 ], [ "g", -7.407997131347656 
], [ "▁cu", -7.415276527404785 ], [ "▁care", -7.432408332824707 ], [ "▁what", -7.433043003082275 ], [ "▁new", -7.4370903968811035 ], [ "or", -7.445409774780273 ], [ "▁some", -7.461953639984131 ], [ "▁get", -7.479001998901367 ], [ "▁were", -7.491549491882324 ], [ "▁just", -7.492495536804199 ], [ "▁there", -7.493194103240967 ], [ "▁would", -7.494382381439209 ], [ "S", -7.4974141120910645 ], [ "▁them", -7.513596057891846 ], [ "▁any", -7.520544052124023 ], [ ").", -7.521052360534668 ], [ "al", -7.523056983947754 ], [ "▁into", -7.527902603149414 ], [ "▁me", -7.528337001800537 ], [ "▁had", -7.532425403594971 ], [ "▁se", -7.5451483726501465 ], [ "▁make", -7.5827131271362305 ], [ "at", -7.589433670043945 ], [ "▁than", -7.592360019683838 ], [ "▁du", -7.595852375030518 ], [ "▁over", -7.6078782081604 ], [ "▁You", -7.626111030578613 ], [ "▁how", -7.635554313659668 ], [ "▁no", -7.63729190826416 ], [ "▁people", -7.639947414398193 ], [ "an", -7.64084005355835 ], [ "”", -7.644528865814209 ], [ "é", -7.646921157836914 ], [ "it", -7.648641109466553 ], [ "▁If", -7.648687839508057 ], [ "k", -7.6605634689331055 ], [ "▁pe", -7.662139415740967 ], [ "is", -7.66726016998291 ], [ "▁her", -7.6733808517456055 ], [ "▁work", -7.680386543273926 ], [ "ve", -7.687412738800049 ], [ "▁only", -7.69785737991333 ], [ "▁may", -7.702393531799316 ], [ "▁its", -7.702449798583984 ], [ "▁first", -7.704373836517334 ], [ "▁most", -7.708309173583984 ], [ "▁well", -7.708758354187012 ], [ "▁use", -7.715085983276367 ], [ "▁zu", -7.718777656555176 ], [ "▁pour", -7.736708164215088 ], [ "z", -7.745654106140137 ], [ "il", -7.745913982391357 ], [ "▁need", -7.74778938293457 ], [ "▁these", -7.763317584991455 ], [ "▁din", -7.769891262054443 ], [ "▁den", -7.775663375854492 ], [ "▁us", -7.778133869171143 ], [ "able", -7.779712200164795 ], [ "▁S", -7.781893730163574 ], [ "▁mit", -7.792516231536865 ], [ "▁very", -7.79970645904541 ], [ "▁am", -7.814100742340088 ], [ "&", -7.829529285430908 ], [ "▁au", -7.83012056350708 ], [ "▁many", -7.83834171295166 ], [ "▁mai", -7.84363317489624 ], [ "A", -7.849830150604248 ], [ "th", -7.855541229248047 ], [ "▁through", -7.859585285186768 ], [ "▁pentru", -7.86391544342041 ], [ "▁two", -7.873607158660889 ], [ "▁von", -7.874959945678711 ], [ "▁way", -7.887117385864258 ], [ "ll", -7.887749195098877 ], [ "I", -7.891303539276123 ], [ "▁ce", -7.9015631675720215 ], [ "▁și", -7.904444694519043 ], [ "▁help", -7.907405853271484 ], [ "▁best", -7.907911777496338 ], [ "),", -7.908212184906006 ], [ "un", -7.925017833709717 ], [ "▁years", -7.925964832305908 ], [ "▁2", -7.9282684326171875 ], [ "▁C", -7.936962604522705 ], [ "▁nu", -7.939520835876465 ], [ "▁good", -7.943995952606201 ], [ "v", -7.94746732711792 ], [ "▁1", -7.94765567779541 ], [ "w", -7.947978496551514 ], [ "▁das", -7.960538864135742 ], [ "▁ca", -7.962430477142334 ], [ "▁where", -7.964908123016357 ], [ "▁know", -7.96622896194458 ], [ "▁year", -7.971063613891602 ], [ "▁He", -7.974609375 ], [ "▁see", -7.980011463165283 ], [ "▁für", -7.984004497528076 ], [ "▁auf", -7.984249114990234 ], [ "▁3", -7.984433650970459 ], [ "de", -7.985401153564453 ], [ "est", -8.002091407775879 ], [ "▁back", -8.007022857666016 ], [ "▁such", -8.008523941040039 ], [ "▁should", -8.011754989624023 ], [ "x", -8.015050888061523 ], [ "▁after", -8.01761245727539 ], [ "▁could", -8.019674301147461 ], [ "▁ist", -8.020784378051758 ], [ "▁now", -8.022845268249512 ], [ "▁much", -8.023111343383789 ], [ "and", -8.02390193939209 ], [ "...", -8.030110359191895 ], [ "▁home", -8.036273956298828 ], [ "to", 
-8.03821086883545 ], [ "▁ein", -8.04833984375 ], [ "▁even", -8.048656463623047 ], [ "▁que", -8.049829483032227 ], [ "▁day", -8.051553726196289 ], [ "▁take", -8.054189682006836 ], [ "▁want", -8.054435729980469 ], [ "▁For", -8.06217098236084 ], [ "▁said", -8.063249588012695 ], [ "▁sur", -8.073471069335938 ], [ "▁une", -8.077030181884766 ], [ "▁să", -8.082921028137207 ], [ "▁dans", -8.084549903869629 ], [ "▁great", -8.088057518005371 ], [ "▁este", -8.08947467803955 ], [ "▁because", -8.094311714172363 ], [ "▁information", -8.104085922241211 ], [ "ului", -8.105451583862305 ], [ "▁find", -8.112174987792969 ], [ "C", -8.119946479797363 ], [ "▁she", -8.125317573547363 ], [ "▁im", -8.126056671142578 ], [ "ation", -8.130115509033203 ], [ "▁then", -8.13021469116211 ], [ "▁est", -8.13099479675293 ], [ "▁par", -8.138585090637207 ], [ "▁used", -8.141871452331543 ], [ "▁E", -8.146790504455566 ], [ "▁made", -8.149978637695312 ], [ "▁So", -8.15785026550293 ], [ "am", -8.16288948059082 ], [ "▁eine", -8.165464401245117 ], [ "▁şi", -8.168368339538574 ], [ "▁business", -8.17335033416748 ], [ "▁right", -8.173593521118164 ], [ "▁here", -8.176125526428223 ], [ "▁being", -8.184967041015625 ], [ "▁B", -8.185355186462402 ], [ "▁those", -8.185736656188965 ], [ "▁before", -8.194721221923828 ], [ "▁And", -8.199501037597656 ], [ "▁P", -8.200712203979492 ], [ "ers", -8.200922012329102 ], [ "▁don", -8.204029083251953 ], [ "B", -8.20487117767334 ], [ "▁life", -8.206265449523926 ], [ "▁go", -8.209736824035645 ], [ "▁As", -8.210551261901855 ], [ "▁M", -8.221170425415039 ], [ "▁each", -8.22955322265625 ], [ "▁qui", -8.23323917388916 ], [ "▁place", -8.236248970031738 ], [ "com", -8.237479209899902 ], [ "ant", -8.252915382385254 ], [ "▁sich", -8.255932807922363 ], [ "▁There", -8.261948585510254 ], [ "ar", -8.264991760253906 ], [ "▁Sie", -8.273868560791016 ], [ "▁own", -8.277531623840332 ], [ "▁part", -8.279440879821777 ], [ "ent", -8.281047821044922 ], [ "▁world", -8.28173542022705 ], [ "ment", -8.282004356384277 ], [ "▁while", -8.294474601745605 ], [ "▁But", -8.295366287231445 ], [ "▁around", -8.300799369812012 ], [ "▁L", -8.301082611083984 ], [ "us", -8.304039001464844 ], [ "▁plus", -8.313054084777832 ], [ "▁To", -8.313691139221191 ], [ "▁5", -8.31412410736084 ], [ "▁high", -8.31862735748291 ], [ "▁long", -8.319378852844238 ], [ "D", -8.320075035095215 ], [ "▁D", -8.320279121398926 ], [ "▁really", -8.322924613952637 ], [ "▁nicht", -8.332040786743164 ], [ "▁Le", -8.335328102111816 ], [ "▁service", -8.3412504196167 ], [ "▁4", -8.342093467712402 ], [ "▁different", -8.342538833618164 ], [ "▁Die", -8.348092079162598 ], [ "▁think", -8.353771209716797 ], [ "—", -8.355998039245605 ], [ "▁auch", -8.357160568237305 ], [ "▁look", -8.362202644348145 ], [ "▁both", -8.366817474365234 ], [ "lor", -8.36687183380127 ], [ "▁down", -8.367999076843262 ], [ "ten", -8.368885040283203 ], [ "▁La", -8.378066062927246 ], [ "▁off", -8.380044937133789 ], [ "▁vous", -8.380541801452637 ], [ "▁They", -8.381462097167969 ], [ "M", -8.383248329162598 ], [ "▁pas", -8.384513854980469 ], [ "▁data", -8.385709762573242 ], [ "▁T", -8.386754989624023 ], [ "▁love", -8.388101577758789 ], [ "▁every", -8.390009880065918 ], [ "▁10", -8.391179084777832 ], [ "▁last", -8.392083168029785 ], [ "▁same", -8.393481254577637 ], [ "▁using", -8.395487785339355 ], [ "▁free", -8.408831596374512 ], [ "▁dem", -8.40894889831543 ], [ "▁still", -8.409984588623047 ], [ "ate", -8.410931587219238 ], [ "ist", -8.415611267089844 ], [ "▁between", -8.420283317565918 ], [ "P", -8.420982360839844 
], [ "be", -8.428167343139648 ], [ "▁available", -8.429443359375 ], [ "man", -8.432978630065918 ], [ "▁company", -8.439678192138672 ], [ "▁G", -8.441640853881836 ], [ "▁experience", -8.444950103759766 ], [ "▁going", -8.449073791503906 ], [ "▁site", -8.453832626342773 ], [ "j", -8.455142974853516 ], [ "are", -8.456900596618652 ], [ "▁set", -8.470661163330078 ], [ "2", -8.473684310913086 ], [ "▁system", -8.474678039550781 ], [ "▁important", -8.476791381835938 ], [ "▁few", -8.482437133789062 ], [ "▁fi", -8.482551574707031 ], [ "ich", -8.483301162719727 ], [ "▁What", -8.488649368286133 ], [ "▁services", -8.502433776855469 ], [ "▁under", -8.502569198608398 ], [ "▁When", -8.50308895111084 ], [ "▁online", -8.50699520111084 ], [ "▁New", -8.51494312286377 ], [ "▁come", -8.524871826171875 ], [ "▁provide", -8.525650024414062 ], [ "F", -8.526449203491211 ], [ "▁team", -8.52782154083252 ], [ "▁always", -8.529409408569336 ], [ "▁De", -8.530412673950195 ], [ "▁că", -8.532517433166504 ], [ "▁him", -8.53586196899414 ], [ "▁F", -8.538305282592773 ], [ "▁things", -8.550079345703125 ], [ "▁including", -8.550943374633789 ], [ "▁support", -8.552608489990234 ], [ "▁number", -8.554113388061523 ], [ "T", -8.557183265686035 ], [ "▁during", -8.55886459350586 ], [ "▁family", -8.560463905334473 ], [ "▁little", -8.561317443847656 ], [ "▁three", -8.567726135253906 ], [ "▁water", -8.56810188293457 ], [ "▁man", -8.569759368896484 ], [ "▁An", -8.57192611694336 ], [ "based", -8.572155952453613 ], [ "▁R", -8.57442855834961 ], [ "▁sau", -8.574433326721191 ], [ "▁avec", -8.576035499572754 ], [ "▁better", -8.576830863952637 ], [ "▁„", -8.582253456115723 ], [ "▁too", -8.58635425567627 ], [ "ge", -8.586719512939453 ], [ "▁must", -8.589736938476562 ], [ "▁per", -8.589916229248047 ], [ "ele", -8.590399742126465 ], [ "▁oder", -8.59264850616455 ], [ "au", -8.59555435180664 ], [ "▁aus", -8.595727920532227 ], [ "▁werden", -8.598653793334961 ], [ "▁does", -8.599140167236328 ], [ "▁without", -8.599270820617676 ], [ "▁ou", -8.599929809570312 ], [ "▁design", -8.60101318359375 ], [ "▁va", -8.605440139770508 ], [ "▁did", -8.615679740905762 ], [ "▁O", -8.619062423706055 ], [ "▁U", -8.623565673828125 ], [ "up", -8.62901496887207 ], [ "▁end", -8.63367748260498 ], [ "▁local", -8.636231422424316 ], [ "▁next", -8.638967514038086 ], [ "▁sure", -8.64098072052002 ], [ "▁lot", -8.64644718170166 ], [ "▁Re", -8.647016525268555 ], [ "▁top", -8.647642135620117 ], [ "▁Our", -8.656886100769043 ], [ "▁small", -8.656978607177734 ], [ "▁full", -8.659418106079102 ], [ "▁something", -8.662886619567871 ], [ "ung", -8.666722297668457 ], [ "▁vor", -8.673250198364258 ], [ "E", -8.673337936401367 ], [ "▁give", -8.67603588104248 ], [ "▁might", -8.67660903930664 ], [ "▁another", -8.679330825805664 ], [ "▁6", -8.680779457092285 ], [ "▁All", -8.681318283081055 ], [ "▁process", -8.681672096252441 ], [ "L", -8.682575225830078 ], [ "▁found", -8.68941593170166 ], [ "▁sind", -8.690044403076172 ], [ "▁since", -8.69528865814209 ], [ "▁With", -8.695560455322266 ], [ "K", -8.696988105773926 ], [ "um", -8.701016426086426 ], [ "▁within", -8.701669692993164 ], [ "▁post", -8.706608772277832 ], [ "▁car", -8.709365844726562 ], [ "une", -8.714099884033203 ], [ "▁N", -8.715041160583496 ], [ "▁J", -8.715597152709961 ], [ "ic", -8.71823787689209 ], [ "R", -8.722309112548828 ], [ "ter", -8.727437019348145 ], [ "ur", -8.728265762329102 ], [ "▁She", -8.73131275177002 ], [ "▁public", -8.732009887695312 ], [ "▁keep", -8.735784530639648 ], [ "▁H", -8.736178398132324 ], [ "▁order", 
-8.740762710571289 ], [ "▁start", -8.742195129394531 ], [ "ez", -8.74746322631836 ], [ "▁‘", -8.749832153320312 ], [ "uri", -8.751104354858398 ], [ "▁20", -8.752482414245605 ], [ "▁On", -8.753515243530273 ], [ "▁offer", -8.763005256652832 ], [ "▁quality", -8.764988899230957 ], [ "▁working", -8.769987106323242 ], [ "▁No", -8.770307540893555 ], [ "▁That", -8.775156021118164 ], [ "▁game", -8.7863187789917 ], [ "▁bei", -8.786642074584961 ], [ "▁today", -8.788661003112793 ], [ "▁never", -8.794586181640625 ], [ "▁week", -8.79587173461914 ], [ "▁St", -8.797786712646484 ], [ "▁feel", -8.799317359924316 ], [ "▁put", -8.801899909973145 ], [ "▁website", -8.80322265625 ], [ "Y", -8.804483413696289 ], [ "▁days", -8.804709434509277 ], [ "▁program", -8.805448532104492 ], [ "▁looking", -8.810463905334473 ], [ "▁K", -8.810808181762695 ], [ "▁students", -8.811436653137207 ], [ "▁create", -8.811800956726074 ], [ "▁change", -8.812616348266602 ], [ "▁book", -8.812932014465332 ], [ "ity", -8.813761711120605 ], [ "▁At", -8.815207481384277 ], [ "▁possible", -8.815670013427734 ], [ "▁sunt", -8.81651496887207 ], [ "▁7", -8.818120002746582 ], [ "▁real", -8.823369026184082 ], [ "▁al", -8.824172019958496 ], [ "▁making", -8.825371742248535 ], [ "▁Be", -8.825761795043945 ], [ "▁products", -8.82592487335205 ], [ "▁case", -8.82653522491455 ], [ "▁school", -8.8272066116333 ], [ "▁say", -8.830352783203125 ], [ "area", -8.832084655761719 ], [ "▁My", -8.833836555480957 ], [ "▁point", -8.834731101989746 ], [ "▁als", -8.83560848236084 ], [ "▁children", -8.836194038391113 ], [ "▁course", -8.844061851501465 ], [ "▁show", -8.847993850708008 ], [ "▁8", -8.849273681640625 ], [ "▁These", -8.849345207214355 ], [ "▁18", -8.851140975952148 ], [ "▁large", -8.851323127746582 ], [ "co", -8.854362487792969 ], [ "▁über", -8.854788780212402 ], [ "▁second", -8.856559753417969 ], [ "▁market", -8.859807014465332 ], [ "▁fost", -8.86048698425293 ], [ "▁easy", -8.863983154296875 ], [ "▁plan", -8.864302635192871 ], [ "▁project", -8.864927291870117 ], [ "G", -8.865178108215332 ], [ "W", -8.869574546813965 ], [ "3", -8.871939659118652 ], [ "▁son", -8.873332023620605 ], [ "la", -8.879053115844727 ], [ "▁face", -8.88137435913086 ], [ "▁needs", -8.88148021697998 ], [ "ch", -8.883138656616211 ], [ "▁personal", -8.88343620300293 ], [ "me", -8.886031150817871 ], [ "▁sont", -8.887377738952637 ], [ "▁je", -8.894930839538574 ], [ "▁non", -8.895471572875977 ], [ "▁got", -8.896591186523438 ], [ "▁Do", -8.897382736206055 ], [ "the", -8.89765453338623 ], [ "▁health", -8.89908504486084 ], [ "▁special", -8.90555477142334 ], [ ".\"", -8.907710075378418 ], [ "1", -8.907852172851562 ], [ "den", -8.908616065979004 ], [ "▁state", -8.909355163574219 ], [ "▁open", -8.91019058227539 ], [ "▁money", -8.91053581237793 ], [ "▁again", -8.913084983825684 ], [ "▁food", -8.913167953491211 ], [ "▁page", -8.914595603942871 ], [ "▁together", -8.91628360748291 ], [ "age", -8.919108390808105 ], [ "▁qu", -8.921928405761719 ], [ "hat", -8.922386169433594 ], [ "▁ver", -8.926993370056152 ], [ "▁W", -8.927785873413086 ], [ "▁away", -8.928759574890137 ], [ "▁wird", -8.931641578674316 ], [ "▁until", -8.934249877929688 ], [ "V", -8.934935569763184 ], [ "▁pre", -8.935851097106934 ], [ "▁One", -8.936429977416992 ], [ "▁product", -8.936561584472656 ], [ "▁often", -8.939326286315918 ], [ "▁wir", -8.944111824035645 ], [ "▁nach", -8.945127487182617 ], [ "▁include", -8.946555137634277 ], [ "▁um", -8.948204040527344 ], [ "▁room", -8.953709602355957 ], [ "▁group", -8.953767776489258 ], [ "▁name", 
-8.954949378967285 ], [ "ce", -8.955448150634766 ], [ "H", -8.956180572509766 ], [ "N", -8.958139419555664 ], [ "▁person", -8.958183288574219 ], [ "▁social", -8.958606719970703 ], [ "▁list", -8.963666915893555 ], [ "▁How", -8.964127540588379 ], [ "▁why", -8.96571159362793 ], [ "▁community", -8.965995788574219 ], [ "▁contact", -8.973031044006348 ], [ "­", -8.9755859375 ], [ "▁co", -8.979683876037598 ], [ "▁play", -8.983960151672363 ], [ "▁having", -8.984169960021973 ], [ "▁power", -8.986917495727539 ], [ "▁call", -8.991690635681152 ], [ "▁against", -8.991816520690918 ], [ "▁become", -8.997780799865723 ], [ "▁cost", -9.003793716430664 ], [ "▁V", -9.004593849182129 ], [ "▁research", -9.006913185119629 ], [ "▁12", -9.007307052612305 ], [ "▁wie", -9.008277893066406 ], [ "der", -9.008386611938477 ], [ "▁thing", -9.014028549194336 ], [ "▁along", -9.017301559448242 ], [ "4", -9.017330169677734 ], [ "▁access", -9.020391464233398 ], [ "▁level", -9.020505905151367 ], [ "▁price", -9.022817611694336 ], [ "▁einen", -9.023714065551758 ], [ "▁side", -9.026359558105469 ], [ "▁Un", -9.026851654052734 ], [ "▁means", -9.030416488647461 ], [ "(", -9.032341957092285 ], [ "▁big", -9.034374237060547 ], [ "▁God", -9.036499977111816 ], [ "▁dass", -9.037314414978027 ], [ "im", -9.037374496459961 ], [ "▁30", -9.037432670593262 ], [ "▁event", -9.041665077209473 ], [ "▁development", -9.042060852050781 ], [ "▁form", -9.04226303100586 ], [ "▁read", -9.042579650878906 ], [ "▁hand", -9.043194770812988 ], [ "▁control", -9.04446792602539 ], [ "▁However", -9.046320915222168 ], [ "▁done", -9.048060417175293 ], [ "▁job", -9.051692008972168 ], [ "▁hard", -9.056619644165039 ], [ "▁war", -9.057538032531738 ], [ "▁area", -9.0584135055542 ], [ "▁add", -9.0586576461792 ], [ "▁votre", -9.0593900680542 ], [ "▁live", -9.059494018554688 ], [ "▁range", -9.060099601745605 ], [ "▁After", -9.060164451599121 ], [ "▁Les", -9.060513496398926 ], [ "▁far", -9.064413070678711 ], [ "ver", -9.064727783203125 ], [ "▁old", -9.069576263427734 ], [ "▁perfect", -9.06976318359375 ], [ "▁15", -9.070429801940918 ], [ "▁space", -9.073654174804688 ], [ "▁house", -9.074068069458008 ], [ "ine", -9.07408618927002 ], [ "▁enough", -9.074334144592285 ], [ "0", -9.075824737548828 ], [ "▁several", -9.077119827270508 ], [ "The", -9.081155776977539 ], [ "mm", -9.085619926452637 ], [ "▁University", -9.08637523651123 ], [ "▁diese", -9.087566375732422 ], [ "▁Co", -9.088335990905762 ], [ "▁comes", -9.088497161865234 ], [ "▁across", -9.088857650756836 ], [ "▁already", -9.090097427368164 ], [ ",”", -9.090341567993164 ], [ "▁body", -9.09276294708252 ], [ "▁Das", -9.094594955444336 ], [ "▁einer", -9.095956802368164 ], [ "▁left", -9.09921646118164 ], [ "▁future", -9.105711936950684 ], [ "▁times", -9.106670379638672 ], [ "▁dar", -9.109651565551758 ], [ "▁simple", -9.110408782958984 ], [ "ry", -9.112407684326172 ], [ "▁getting", -9.113155364990234 ], [ "▁try", -9.115362167358398 ], [ "ți", -9.116897583007812 ], [ "ness", -9.120043754577637 ], [ "▁makes", -9.120377540588379 ], [ "▁past", -9.120619773864746 ], [ "ca", -9.12130069732666 ], [ "▁light", -9.122207641601562 ], [ "▁Der", -9.122997283935547 ], [ "▁run", -9.125843048095703 ], [ "▁four", -9.126943588256836 ], [ "ance", -9.130500793457031 ], [ "▁ever", -9.131503105163574 ], [ "▁einem", -9.131816864013672 ], [ "▁below", -9.133723258972168 ], [ "O", -9.134073257446289 ], [ "▁9", -9.137282371520996 ], [ "▁learn", -9.14004135131836 ], [ "out", -9.140358924865723 ], [ "▁video", -9.143178939819336 ], [ "▁etc", -9.146929740905762 
], [ "▁«", -9.148795127868652 ], [ "▁zum", -9.149712562561035 ], [ "▁kann", -9.1504487991333 ], [ "▁minutes", -9.151180267333984 ], [ "▁example", -9.154194831848145 ], [ "▁nous", -9.154619216918945 ], [ "▁Se", -9.157441139221191 ], [ "▁sie", -9.159955024719238 ], [ "▁industry", -9.161614418029785 ], [ "▁problem", -9.162016868591309 ], [ "J", -9.162480354309082 ], [ "▁country", -9.163366317749023 ], [ "▁fact", -9.164189338684082 ], [ "▁type", -9.164190292358398 ], [ "ner", -9.164238929748535 ], [ "▁companies", -9.165864944458008 ], [ "▁line", -9.169849395751953 ], [ "▁city", -9.172713279724121 ], [ "▁check", -9.173710823059082 ], [ "▁doing", -9.174406051635742 ], [ "elle", -9.175037384033203 ], [ "▁fun", -9.176549911499023 ], [ "▁En", -9.177546501159668 ], [ "▁Your", -9.178601264953613 ], [ "ling", -9.181450843811035 ], [ "▁share", -9.18185806274414 ], [ "ile", -9.182005882263184 ], [ "▁actually", -9.187544822692871 ], [ "▁value", -9.187751770019531 ], [ "zi", -9.188661575317383 ], [ "▁ab", -9.1898832321167 ], [ "▁offers", -9.1905517578125 ], [ "▁less", -9.190573692321777 ], [ "▁night", -9.193560600280762 ], [ "▁Dr", -9.19518756866455 ], [ "▁started", -9.195454597473145 ], [ "▁least", -9.198020935058594 ], [ "▁short", -9.198562622070312 ], [ "▁main", -9.201143264770508 ], [ "▁single", -9.202939987182617 ], [ "▁though", -9.203780174255371 ], [ "▁prin", -9.203930854797363 ], [ "time", -9.20531177520752 ], [ "▁hours", -9.206608772277832 ], [ "▁others", -9.206849098205566 ], [ "▁called", -9.20730209350586 ], [ "▁visit", -9.208869934082031 ], [ "▁bit", -9.209009170532227 ], [ "ée", -9.210821151733398 ], [ "▁customers", -9.211383819580078 ], [ "▁music", -9.212000846862793 ], [ "▁members", -9.217191696166992 ], [ "ies", -9.21743392944336 ], [ "▁pay", -9.219176292419434 ], [ "nd", -9.219744682312012 ], [ "▁once", -9.221125602722168 ], [ "gen", -9.2217378616333 ], [ "▁können", -9.222976684570312 ], [ "▁low", -9.223771095275879 ], [ "▁durch", -9.227394104003906 ], [ "▁story", -9.228075981140137 ], [ "▁understand", -9.22953987121582 ], [ "“", -9.229856491088867 ], [ "▁Am", -9.231831550598145 ], [ "▁didn", -9.234603881835938 ], [ "▁content", -9.237217903137207 ], [ "son", -9.24180793762207 ], [ "▁building", -9.242242813110352 ], [ "▁result", -9.242605209350586 ], [ "▁aux", -9.243107795715332 ], [ "▁complete", -9.244999885559082 ], [ "▁doesn", -9.24510669708252 ], [ "▁haben", -9.246070861816406 ], [ "▁questions", -9.24661636352539 ], [ "line", -9.247077941894531 ], [ "▁technology", -9.247429847717285 ], [ "▁Pro", -9.247976303100586 ], [ "▁current", -9.248504638671875 ], [ "▁won", -9.248883247375488 ], [ "▁let", -9.250710487365723 ], [ "▁features", -9.251978874206543 ], [ "▁please", -9.258262634277344 ], [ "5", -9.258519172668457 ], [ "▁above", -9.259394645690918 ], [ "ive", -9.262128829956055 ], [ "▁management", -9.262394905090332 ], [ "▁lui", -9.262539863586426 ], [ "her", -9.263057708740234 ], [ "▁training", -9.265711784362793 ], [ "▁everything", -9.2665433883667 ], [ "▁noch", -9.266846656799316 ], [ "▁came", -9.267708778381348 ], [ "▁web", -9.272823333740234 ], [ "▁ensure", -9.272987365722656 ], [ "▁months", -9.273130416870117 ], [ "▁art", -9.27313232421875 ], [ "▁sub", -9.274359703063965 ], [ "▁million", -9.274559020996094 ], [ "▁professional", -9.275035858154297 ], [ "▁results", -9.278368949890137 ], [ "▁kind", -9.278395652770996 ], [ "▁season", -9.279285430908203 ], [ "▁unique", -9.281067848205566 ], [ "ze", -9.284360885620117 ], [ "▁enjoy", -9.28487777709961 ], [ "▁early", -9.287765502929688 ], [ 
"▁major", -9.288202285766602 ], [ "▁yet", -9.29152774810791 ], [ "▁Ver", -9.293331146240234 ], [ "one", -9.296777725219727 ], [ "▁media", -9.29719352722168 ], [ "▁[", -9.30095100402832 ], [ "▁property", -9.302969932556152 ], [ "▁beautiful", -9.304466247558594 ], [ "▁given", -9.305286407470703 ], [ "▁due", -9.306716918945312 ], [ "▁government", -9.307181358337402 ], [ "▁nur", -9.30881404876709 ], [ "▁email", -9.309103012084961 ], [ "▁total", -9.311080932617188 ], [ "▁natural", -9.311264038085938 ], [ "▁test", -9.311450004577637 ], [ "▁provides", -9.311640739440918 ], [ "▁various", -9.312631607055664 ], [ "▁American", -9.315605163574219 ], [ "▁moment", -9.318109512329102 ], [ "▁air", -9.318952560424805 ], [ "▁idea", -9.319236755371094 ], [ "▁known", -9.319981575012207 ], [ "▁Il", -9.320504188537598 ], [ "▁friends", -9.320576667785645 ], [ "▁final", -9.320919036865234 ], [ "▁buy", -9.32139778137207 ], [ "▁specific", -9.322234153747559 ], [ "▁issues", -9.32454776763916 ], [ "▁took", -9.325233459472656 ], [ "▁mind", -9.326258659362793 ], [ "▁study", -9.32675838470459 ], [ "▁addition", -9.328418731689453 ], [ "▁size", -9.332446098327637 ], [ "▁pro", -9.334047317504883 ], [ "▁film", -9.33545970916748 ], [ "▁pot", -9.335636138916016 ], [ "▁thought", -9.338120460510254 ], [ "▁tell", -9.33890438079834 ], [ "▁While", -9.339675903320312 ], [ "▁head", -9.339983940124512 ], [ "▁clients", -9.340429306030273 ], [ "▁performance", -9.346199989318848 ], [ "▁question", -9.346835136413574 ], [ "▁whether", -9.347925186157227 ], [ "▁certain", -9.34826946258545 ], [ "▁model", -9.348764419555664 ], [ "▁following", -9.350926399230957 ], [ "▁energy", -9.354207992553711 ], [ "▁office", -9.354207992553711 ], [ "▁whole", -9.356687545776367 ], [ "▁bring", -9.356956481933594 ], [ "▁required", -9.35726261138916 ], [ "ţi", -9.358223915100098 ], [ "▁date", -9.358695030212402 ], [ "_", -9.358983039855957 ], [ "que", -9.359789848327637 ], [ "▁da", -9.360264778137207 ], [ "▁US", -9.36120319366455 ], [ "▁taking", -9.36143684387207 ], [ "go", -9.362788200378418 ], [ "▁living", -9.36341667175293 ], [ "▁someone", -9.363489151000977 ], [ "▁heart", -9.365120887756348 ], [ "▁key", -9.365775108337402 ], [ "▁areas", -9.366238594055176 ], [ "▁says", -9.367013931274414 ], [ "▁2018", -9.369132041931152 ], [ "▁month", -9.37012767791748 ], [ "▁Er", -9.371354103088379 ], [ "ste", -9.375077247619629 ], [ "▁11", -9.375179290771484 ], [ "▁front", -9.37528133392334 ], [ "▁Now", -9.37669563293457 ], [ "▁class", -9.376946449279785 ], [ "▁choose", -9.377082824707031 ], [ "pe", -9.37808609008789 ], [ "▁further", -9.379021644592285 ], [ "▁believe", -9.37936019897461 ], [ "of", -9.379590034484863 ], [ "▁among", -9.380990982055664 ], [ "sch", -9.381686210632324 ], [ "▁child", -9.382609367370605 ], [ "▁aber", -9.38376235961914 ], [ "▁Please", -9.386269569396973 ], [ "rea", -9.387248992919922 ], [ "▁later", -9.387272834777832 ], [ "▁amount", -9.388760566711426 ], [ "ice", -9.390128135681152 ], [ "▁National", -9.390177726745605 ], [ "▁style", -9.390748977661133 ], [ "▁tout", -9.391490936279297 ], [ "▁staff", -9.392939567565918 ], [ "▁white", -9.397933959960938 ], [ "▁ge", -9.399179458618164 ], [ "▁five", -9.400984764099121 ], [ "▁blog", -9.40109920501709 ], [ "▁designed", -9.40125846862793 ], [ "▁went", -9.402216911315918 ], [ "▁Da", -9.40268611907959 ], [ "▁general", -9.403801918029785 ], [ "▁rest", -9.403874397277832 ], [ "▁zur", -9.40579891204834 ], [ "▁quite", -9.405948638916016 ], [ "per", -9.40687084197998 ], [ "▁customer", -9.408379554748535 ], [ 
"▁close", -9.408747673034668 ], [ "▁Some", -9.41054630279541 ], [ "▁women", -9.41075611114502 ], [ "▁move", -9.410761833190918 ], [ "▁software", -9.411357879638672 ], [ "▁Ein", -9.413651466369629 ], [ "▁Ab", -9.413823127746582 ], [ "▁history", -9.413864135742188 ], [ "▁either", -9.41564655303955 ], [ "▁seen", -9.417396545410156 ], [ "▁card", -9.419726371765137 ], [ "▁City", -9.421541213989258 ], [ "▁hope", -9.421769142150879 ], [ "▁16", -9.422072410583496 ], [ "és", -9.422825813293457 ], [ "va", -9.423294067382812 ], [ "▁Al", -9.423827171325684 ], [ "▁especially", -9.424827575683594 ], [ "▁view", -9.426136016845703 ], [ "men", -9.427363395690918 ], [ "▁account", -9.427489280700684 ], [ "▁needed", -9.429777145385742 ], [ "▁United", -9.429789543151855 ], [ "]", -9.432387351989746 ], [ "▁yourself", -9.432788848876953 ], [ "▁100", -9.433059692382812 ], [ "▁receive", -9.433417320251465 ], [ "▁ideas", -9.43369197845459 ], [ "▁writing", -9.434585571289062 ], [ "▁simply", -9.434741973876953 ], [ "▁present", -9.435087203979492 ], [ "▁continue", -9.436107635498047 ], [ "▁application", -9.44115161895752 ], [ "▁build", -9.44187068939209 ], [ "▁turn", -9.44249439239502 ], [ "ated", -9.442923545837402 ], [ "▁everyone", -9.443060874938965 ], [ "cette", -9.443114280700684 ], [ "▁bien", -9.444964408874512 ], [ "less", -9.445222854614258 ], [ "▁Si", -9.445359230041504 ], [ "▁original", -9.446867942810059 ], [ "8", -9.44794750213623 ], [ "▁individual", -9.448895454406738 ], [ "tre", -9.449433326721191 ], [ "▁works", -9.45171070098877 ], [ "▁options", -9.451821327209473 ], [ "▁May", -9.454456329345703 ], [ "▁Not", -9.454940795898438 ], [ "▁report", -9.455467224121094 ], [ "mer", -9.457239151000977 ], [ "▁human", -9.459118843078613 ], [ "▁provided", -9.459603309631348 ], [ "▁By", -9.460925102233887 ], [ "▁series", -9.462006568908691 ], [ "7", -9.46226692199707 ], [ "▁modern", -9.463875770568848 ], [ "▁meet", -9.463921546936035 ], [ "▁50", -9.464119911193848 ], [ "▁25", -9.46969985961914 ], [ "▁color", -9.470091819763184 ], [ "▁download", -9.470109939575195 ], [ "▁Here", -9.471144676208496 ], [ "6", -9.471323013305664 ], [ "▁poate", -9.471449851989746 ], [ "▁În", -9.472321510314941 ], [ "▁phone", -9.473695755004883 ], [ "▁likely", -9.474374771118164 ], [ "▁table", -9.476469993591309 ], [ "▁ma", -9.476551055908203 ], [ "▁Or", -9.479181289672852 ], [ "Z", -9.48026180267334 ], [ "▁19", -9.482215881347656 ], [ "▁insurance", -9.482544898986816 ], [ "▁anything", -9.483808517456055 ], [ "▁search", -9.485033988952637 ], [ "▁Ge", -9.48520565032959 ], [ "▁issue", -9.485564231872559 ], [ "▁includes", -9.485688209533691 ], [ "▁clear", -9.487342834472656 ], [ "les", -9.488021850585938 ], [ "▁almost", -9.488259315490723 ], [ "ilor", -9.48935317993164 ], [ "▁14", -9.490717887878418 ], [ "by", -9.494056701660156 ], [ "▁Du", -9.49624252319336 ], [ "▁mais", -9.497303009033203 ], [ "ier", -9.499163627624512 ], [ "▁law", -9.49924087524414 ], [ "▁added", -9.500134468078613 ], [ "▁con", -9.500962257385254 ], [ ",\"", -9.501530647277832 ], [ "▁ago", -9.502127647399902 ], [ "▁His", -9.504697799682617 ], [ "▁points", -9.504981994628906 ], [ "▁mult", -9.505581855773926 ], [ "▁financial", -9.506216049194336 ], [ "▁problems", -9.506428718566895 ], [ "▁however", -9.50648307800293 ], [ "▁events", -9.50675106048584 ], [ "▁half", -9.507889747619629 ], [ "ard", -9.511183738708496 ], [ "▁ask", -9.51156997680664 ], [ "▁version", -9.511631965637207 ], [ "end", -9.512478828430176 ], [ "▁created", -9.512639999389648 ], [ "▁lead", -9.512917518615723 
], [ "▁focus", -9.513853073120117 ], [ "▁increase", -9.515096664428711 ], [ "ex", -9.515118598937988 ], [ "▁allow", -9.515798568725586 ], [ "▁extra", -9.516464233398438 ], [ "▁24", -9.516692161560059 ], [ "▁credit", -9.516772270202637 ], [ "▁production", -9.516801834106445 ], [ "zu", -9.517256736755371 ], [ "▁black", -9.51754093170166 ], [ "▁systems", -9.518040657043457 ], [ "▁17", -9.518178939819336 ], [ "▁opportunity", -9.518531799316406 ], [ "▁bis", -9.519219398498535 ], [ "▁fast", -9.519807815551758 ], [ "ring", -9.521166801452637 ], [ "▁Don", -9.522114753723145 ], [ "▁via", -9.52242660522461 ], [ "fer", -9.5225248336792 ], [ "▁comme", -9.522799491882324 ], [ "▁popular", -9.523722648620605 ], [ "▁South", -9.524491310119629 ], [ "ating", -9.525003433227539 ], [ "▁State", -9.525198936462402 ], [ "ator", -9.525679588317871 ], [ "▁common", -9.525968551635742 ], [ "con", -9.526727676391602 ], [ "▁throughout", -9.527557373046875 ], [ "▁risk", -9.52774715423584 ], [ "▁young", -9.528532028198242 ], [ "▁Je", -9.528688430786133 ], [ "▁image", -9.52928352355957 ], [ "ha", -9.529376983642578 ], [ "▁third", -9.529587745666504 ], [ "▁taken", -9.530049324035645 ], [ "▁Z", -9.5314302444458 ], [ "▁dis", -9.5316162109375 ], [ "▁From", -9.533575057983398 ], [ "▁details", -9.534862518310547 ], [ "▁games", -9.53516674041748 ], [ "▁practice", -9.536040306091309 ], [ "che", -9.536151885986328 ], [ "▁security", -9.537364959716797 ], [ "▁medical", -9.537653923034668 ], [ "▁learning", -9.537806510925293 ], [ "▁material", -9.538509368896484 ], [ "▁international", -9.540703773498535 ], [ "▁forward", -9.541245460510254 ], [ "▁paper", -9.541247367858887 ], [ "▁action", -9.541348457336426 ], [ "▁file", -9.542378425598145 ], [ "▁oil", -9.543096542358398 ], [ "▁self", -9.54377555847168 ], [ "▁private", -9.545247077941895 ], [ "▁interest", -9.545559883117676 ], [ "bar", -9.546065330505371 ], [ "▁sale", -9.547115325927734 ], [ "▁stay", -9.547348976135254 ], [ "ke", -9.548089981079102 ], [ "▁San", -9.549053192138672 ], [ "▁matter", -9.549870491027832 ], [ "▁reason", -9.550254821777344 ], [ "ted", -9.55147647857666 ], [ "▁potential", -9.551742553710938 ], [ "▁brand", -9.552441596984863 ], [ "▁field", -9.55315113067627 ], [ "▁treatment", -9.553420066833496 ], [ "▁period", -9.553516387939453 ], [ "▁York", -9.553890228271484 ], [ "▁Park", -9.554738998413086 ], [ "▁acest", -9.556009292602539 ], [ "ou", -9.556926727294922 ], [ "▁Ce", -9.557014465332031 ], [ "▁ready", -9.558111190795898 ], [ "▁rather", -9.55860424041748 ], [ "▁outside", -9.560086250305176 ], [ "▁standard", -9.560121536254883 ], [ "▁located", -9.560770034790039 ], [ "▁marketing", -9.562313079833984 ], [ "cu", -9.564041137695312 ], [ "▁Can", -9.564562797546387 ], [ "▁education", -9.566105842590332 ], [ "use", -9.566640853881836 ], [ "▁role", -9.566828727722168 ], [ "▁men", -9.571505546569824 ], [ "▁probably", -9.571550369262695 ], [ "▁store", -9.57221508026123 ], [ "▁John", -9.572355270385742 ], [ "▁rate", -9.573956489562988 ], [ "▁code", -9.573994636535645 ], [ "▁kids", -9.574408531188965 ], [ "▁currently", -9.57552719116211 ], [ "▁near", -9.576475143432617 ], [ "▁sales", -9.576716423034668 ], [ "▁usually", -9.577012062072754 ], [ "▁activities", -9.577242851257324 ], [ "▁party", -9.577371597290039 ], [ "▁leur", -9.577434539794922 ], [ "▁particular", -9.577627182006836 ], [ "▁mehr", -9.577707290649414 ], [ "ill", -9.578757286071777 ], [ "▁percent", -9.579113006591797 ], [ "▁fait", -9.579537391662598 ], [ "▁happy", -9.579904556274414 ], [ "▁inside", 
-9.58005428314209 ], [ "▁save", -9.580510139465332 ], [ "▁skills", -9.580765724182129 ], [ "▁consider", -9.581025123596191 ], [ "▁recent", -9.58161735534668 ], [ "▁strong", -9.581781387329102 ], [ "▁position", -9.582076072692871 ], [ "▁knowledge", -9.582303047180176 ], [ "▁tax", -9.583868980407715 ], [ "▁users", -9.584261894226074 ], [ "und", -9.585564613342285 ], [ "▁coming", -9.585904121398926 ], [ "▁article", -9.585923194885254 ], [ "min", -9.586345672607422 ], [ "▁sein", -9.586555480957031 ], [ "▁travel", -9.586871147155762 ], [ "▁changes", -9.58765983581543 ], [ "▁impact", -9.588181495666504 ], [ "▁wanted", -9.588460922241211 ], [ "▁address", -9.5885591506958 ], [ "▁soon", -9.58873462677002 ], [ "▁North", -9.588915824890137 ], [ "ată", -9.589237213134766 ], [ "▁trying", -9.58985424041748 ], [ "▁app", -9.590612411499023 ], [ "▁School", -9.592510223388672 ], [ "▁Es", -9.592548370361328 ], [ "we", -9.59261703491211 ], [ "▁conditions", -9.59292984008789 ], [ "▁digital", -9.593293190002441 ], [ "▁similar", -9.594805717468262 ], [ "▁solution", -9.59514331817627 ], [ "▁location", -9.595183372497559 ], [ "▁Of", -9.595418930053711 ], [ "▁follow", -9.595842361450195 ], [ "▁red", -9.597526550292969 ], [ "▁review", -9.599202156066895 ], [ "▁skin", -9.599575996398926 ], [ "▁pretty", -9.600369453430176 ], [ "day", -9.600558280944824 ], [ "▁dé", -9.602072715759277 ], [ "▁cause", -9.602169036865234 ], [ "▁Sa", -9.602463722229004 ], [ "▁user", -9.602520942687988 ], [ "▁Man", -9.603377342224121 ], [ "”.", -9.604146003723145 ], [ "▁Just", -9.604366302490234 ], [ "▁faire", -9.604475021362305 ], [ "▁member", -9.605619430541992 ], [ "▁iar", -9.606892585754395 ], [ "▁higher", -9.607715606689453 ], [ "▁step", -9.607887268066406 ], [ "▁wide", -9.608185768127441 ], [ "▁uns", -9.608920097351074 ], [ "▁World", -9.609135627746582 ], [ "▁additional", -9.61176586151123 ], [ "ber", -9.613197326660156 ], [ "▁easily", -9.613990783691406 ], [ "▁deal", -9.615070343017578 ], [ "▁ways", -9.615514755249023 ], [ "▁mobile", -9.616837501525879 ], [ "▁national", -9.616913795471191 ], [ "▁couple", -9.617389678955078 ], [ "▁ihre", -9.61939811706543 ], [ "▁choice", -9.619612693786621 ], [ "for", -9.619686126708984 ], [ "ous", -9.62070083618164 ], [ "▁Google", -9.620855331420898 ], [ "▁environment", -9.622426986694336 ], [ "urile", -9.623322486877441 ], [ "▁Center", -9.626680374145508 ], [ "mp", -9.628592491149902 ], [ "▁»", -9.629727363586426 ], [ "qui", -9.630680084228516 ], [ "▁growth", -9.631048202514648 ], [ "ler", -9.633174896240234 ], [ "▁improve", -9.63360595703125 ], [ "▁items", -9.6336669921875 ], [ "▁Nu", -9.63393783569336 ], [ "▁leave", -9.634074211120605 ], [ "▁true", -9.634805679321289 ], [ "▁wurde", -9.63487434387207 ], [ "▁cannot", -9.635004043579102 ], [ "▁13", -9.635096549987793 ], [ "▁running", -9.636015892028809 ], [ "▁anti", -9.636177062988281 ], [ "▁option", -9.636306762695312 ], [ "▁reading", -9.63657283782959 ], [ "▁Car", -9.636698722839355 ], [ "▁Wir", -9.638110160827637 ], [ "▁April", -9.63975715637207 ], [ "▁behind", -9.640642166137695 ], [ "▁client", -9.640750885009766 ], [ "▁cover", -9.641012191772461 ], [ "▁stop", -9.641090393066406 ], [ "ja", -9.641277313232422 ], [ "▁built", -9.641307830810547 ], [ "▁Con", -9.641313552856445 ], [ "ement", -9.641366004943848 ], [ "▁projects", -9.641828536987305 ], [ "▁variety", -9.641840934753418 ], [ "▁Ihre", -9.642666816711426 ], [ "ș", -9.64302921295166 ], [ "▁unter", -9.64385986328125 ], [ "▁longer", -9.646577835083008 ], [ "year", -9.647161483764648 ], [ 
"▁photo", -9.648370742797852 ], [ "▁Also", -9.64933967590332 ], [ "▁received", -9.651098251342773 ], [ "▁return", -9.652676582336426 ], [ "00", -9.653081893920898 ], [ "▁bar", -9.653343200683594 ], [ "ary", -9.654427528381348 ], [ "elor", -9.655137062072754 ], [ "▁Home", -9.656189918518066 ], [ "our", -9.656298637390137 ], [ "▁Me", -9.65771198272705 ], [ "▁held", -9.659111022949219 ], [ "▁click", -9.66014289855957 ], [ "▁ex", -9.660178184509277 ], [ "▁cum", -9.661561965942383 ], [ "▁takes", -9.66395378112793 ], [ "▁computer", -9.665796279907227 ], [ "▁told", -9.668192863464355 ], [ "+", -9.670648574829102 ], [ "▁patients", -9.670809745788574 ], [ "ting", -9.672165870666504 ], [ "▁direct", -9.672248840332031 ], [ "▁quickly", -9.672410011291504 ], [ "tic", -9.672877311706543 ], [ "▁vom", -9.673723220825195 ], [ "▁di", -9.67381477355957 ], [ "▁kitchen", -9.674022674560547 ], [ "▁network", -9.675640106201172 ], [ "▁2015", -9.676688194274902 ], [ "▁effective", -9.677227020263672 ], [ "▁collection", -9.677703857421875 ], [ "▁2017", -9.677751541137695 ], [ "▁words", -9.678145408630371 ], [ "▁cele", -9.678857803344727 ], [ "▁student", -9.678862571716309 ], [ "▁amazing", -9.678932189941406 ], [ "eur", -9.680419921875 ], [ ".”", -9.68227481842041 ], [ "▁ale", -9.682716369628906 ], [ "”,", -9.68414306640625 ], [ "▁purchase", -9.684350967407227 ], [ "▁mean", -9.68477725982666 ], [ "▁West", -9.686846733093262 ], [ "▁nice", -9.6889066696167 ], [ "▁age", -9.689131736755371 ], [ "▁base", -9.68923568725586 ], [ "▁summer", -9.68928337097168 ], [ "▁multi", -9.689496994018555 ], [ "▁allows", -9.689573287963867 ], [ "▁latest", -9.689604759216309 ], [ "▁global", -9.68992805480957 ], [ "▁chance", -9.690792083740234 ], [ "▁sense", -9.690872192382812 ], [ "ieren", -9.692789077758789 ], [ "▁difficult", -9.693133354187012 ], [ "ité", -9.694750785827637 ], [ "ka", -9.694792747497559 ], [ "du", -9.69483757019043 ], [ "▁providing", -9.695744514465332 ], [ "▁Art", -9.696940422058105 ], [ "▁drive", -9.698554992675781 ], [ "▁Go", -9.698877334594727 ], [ "▁très", -9.699414253234863 ], [ "U", -9.699579238891602 ], [ "▁Pre", -9.699846267700195 ], [ "▁shows", -9.700040817260742 ], [ "▁hair", -9.701324462890625 ], [ "▁success", -9.701513290405273 ], [ "▁UK", -9.703169822692871 ], [ "red", -9.703241348266602 ], [ "ü", -9.703370094299316 ], [ "ish", -9.703631401062012 ], [ "▁weeks", -9.704839706420898 ], [ "▁solutions", -9.7055025100708 ], [ "▁Pe", -9.7057523727417 ], [ "▁equipment", -9.706141471862793 ], [ "și", -9.706482887268066 ], [ "▁worked", -9.707073211669922 ], [ "\".", -9.708627700805664 ], [ "▁legal", -9.708720207214355 ], [ "▁bad", -9.70892333984375 ], [ "▁40", -9.709561347961426 ], [ "▁Internet", -9.709798812866211 ], [ "▁included", -9.709976196289062 ], [ "▁upon", -9.710977554321289 ], [ "▁excellent", -9.71106243133545 ], [ "▁goal", -9.71130084991455 ], [ "▁El", -9.711408615112305 ], [ "▁Mo", -9.711703300476074 ], [ "▁policy", -9.71319580078125 ], [ "▁aussi", -9.713537216186523 ], [ "▁weight", -9.713687896728516 ], [ "ici", -9.715133666992188 ], [ "▁approach", -9.715584754943848 ], [ "▁six", -9.71579647064209 ], [ "▁entire", -9.715911865234375 ], [ "9", -9.71633529663086 ], [ "▁send", -9.716832160949707 ], [ "▁1.", -9.718971252441406 ], [ "▁wenn", -9.719056129455566 ], [ "▁photos", -9.71993637084961 ], [ ": -9.721014022827148 ], [ "ger", -9.72281551361084 ], [ "▁favorite", -9.723104476928711 ], [ "ley", -9.723477363586426 ], [ "▁else", -9.72463321685791 ], [ "▁types", -9.72468376159668 ], [ "▁link", 
-9.725333213806152 ], [ "▁recently", -9.72584056854248 ], [ "▁Mit", -9.72631549835205 ], [ "▁hot", -9.726548194885254 ], [ "tra", -9.726597785949707 ], [ "ş", -9.727307319641113 ], [ "▁according", -9.728511810302734 ], [ "▁necessary", -9.728511810302734 ], [ "▁multiple", -9.729269027709961 ], [ "▁Im", -9.729510307312012 ], [ "▁sehr", -9.729660034179688 ], [ "▁sign", -9.732263565063477 ], [ "▁anyone", -9.73283576965332 ], [ "▁land", -9.733613014221191 ], [ "▁States", -9.734037399291992 ], [ "▁unsere", -9.734119415283203 ], [ "ées", -9.734639167785645 ], [ "We", -9.735671043395996 ], [ "▁nothing", -9.735845565795898 ], [ "▁commercial", -9.736858367919922 ], [ "ful", -9.737265586853027 ], [ "▁seems", -9.739325523376465 ], [ "▁International", -9.740097045898438 ], [ "▁March", -9.74163818359375 ], [ "▁Thanks", -9.743307113647461 ], [ "▁County", -9.74365234375 ], [ "▁books", -9.744638442993164 ], [ "▁Ca", -9.7451753616333 ], [ "▁mi", -9.746304512023926 ], [ "▁meeting", -9.746662139892578 ], [ "▁tools", -9.747593879699707 ], [ "▁cut", -9.747650146484375 ], [ "▁related", -9.74765682220459 ], [ "▁lives", -9.748003005981445 ], [ "way", -9.748501777648926 ], [ "▁develop", -9.748651504516602 ], [ "▁sound", -9.748723983764648 ], [ "▁safe", -9.748950958251953 ], [ "▁Her", -9.74937629699707 ], [ "▁average", -9.751277923583984 ], [ "▁clean", -9.75174331665039 ], [ "▁talk", -9.752362251281738 ], [ "▁peut", -9.75241756439209 ], [ "▁dann", -9.752546310424805 ], [ "▁terms", -9.753265380859375 ], [ "▁foarte", -9.753512382507324 ], [ "▁super", -9.754284858703613 ], [ "▁programs", -9.754853248596191 ], [ "▁decision", -9.75540828704834 ], [ "▁costs", -9.756058692932129 ], [ "▁être", -9.756291389465332 ], [ "▁2019", -9.757674217224121 ], [ "led", -9.759482383728027 ], [ "▁parents", -9.759617805480957 ], [ "▁Mr", -9.761702537536621 ], [ "▁lower", -9.762362480163574 ], [ "▁door", -9.762978553771973 ], [ "▁été", -9.763933181762695 ], [ "▁box", -9.764954566955566 ], [ "▁record", -9.765517234802246 ], [ "▁win", -9.765650749206543 ], [ "ster", -9.766402244567871 ], [ "▁America", -9.766748428344727 ], [ "▁immer", -9.768763542175293 ], [ "▁road", -9.76996898651123 ], [ "▁leading", -9.772759437561035 ], [ "▁section", -9.772838592529297 ], [ "▁Facebook", -9.772990226745605 ], [ "▁Most", -9.7738676071167 ], [ "iert", -9.77435302734375 ], [ "▁morning", -9.774497032165527 ], [ "▁asked", -9.775190353393555 ], [ "▁involved", -9.77551555633545 ], [ "▁hier", -9.777607917785645 ], [ "▁images", -9.77821159362793 ], [ "▁House", -9.778263092041016 ], [ "▁highly", -9.780763626098633 ], [ "▁Bar", -9.781620979309082 ], [ "▁Service", -9.782510757446289 ], [ "▁attention", -9.784318923950195 ], [ "▁normal", -9.784571647644043 ], [ "▁plans", -9.785883903503418 ], [ "▁source", -9.785930633544922 ], [ "▁Aus", -9.788092613220215 ], [ "▁benefits", -9.788655281066895 ], [ "▁ses", -9.789348602294922 ], [ "des", -9.789867401123047 ], [ "▁internet", -9.789949417114258 ], [ "▁materials", -9.790080070495605 ], [ "▁même", -9.791318893432617 ], [ "▁fine", -9.791522026062012 ], [ "▁fit", -9.792226791381836 ], [ "▁21", -9.792612075805664 ], [ "▁itself", -9.793739318847656 ], [ "▁wieder", -9.793972969055176 ], [ "▁Many", -9.795313835144043 ], [ "▁nature", -9.795402526855469 ], [ "▁pain", -9.795467376708984 ], [ "▁device", -9.796183586120605 ], [ "art", -9.796989440917969 ], [ "pro", -9.7971830368042 ], [ "▁France", -9.797271728515625 ], [ "lich", -9.797314643859863 ], [ "▁2014", -9.799542427062988 ], [ "▁inter", -9.799964904785156 ], [ "▁Li", 
-9.800453186035156 ], [ "▁career", -9.801136016845703 ], [ "▁looks", -9.80145263671875 ], [ "▁ré", -9.802245140075684 ], [ "▁ability", -9.802556991577148 ], [ "▁situation", -9.803154945373535 ], [ "ville", -9.803157806396484 ], [ "▁2016", -9.80319595336914 ], [ "tes", -9.803462982177734 ], [ "▁remember", -9.803879737854004 ], [ "▁TV", -9.803998947143555 ], [ "▁levels", -9.805853843688965 ], [ "▁subject", -9.807723999023438 ], [ "ally", -9.80844497680664 ], [ "▁reduce", -9.810232162475586 ], [ "▁*", -9.8108491897583 ], [ "▁Day", -9.810867309570312 ], [ "▁write", -9.812152862548828 ], [ "▁pick", -9.814252853393555 ], [ "ence", -9.815399169921875 ], [ "▁fresh", -9.816520690917969 ], [ "▁traditional", -9.816662788391113 ], [ "chi", -9.817692756652832 ], [ "▁machine", -9.818047523498535 ], [ "▁resources", -9.819125175476074 ], [ "â", -9.819502830505371 ], [ "▁countries", -9.820009231567383 ], [ "▁Even", -9.820342063903809 ], [ "▁green", -9.821283340454102 ], [ "▁Free", -9.821910858154297 ], [ "▁daily", -9.822112083435059 ], [ "▁respect", -9.823013305664062 ], [ "▁instead", -9.823714256286621 ], [ "▁Once", -9.82418155670166 ], [ "▁word", -9.824407577514648 ], [ "▁construction", -9.82489013671875 ], [ "▁huge", -9.825064659118652 ], [ "▁feature", -9.825220108032227 ], [ "▁themselves", -9.826369285583496 ], [ "▁loss", -9.82919692993164 ], [ "%", -9.830063819885254 ], [ "▁safety", -9.830256462097168 ], [ "▁economic", -9.831406593322754 ], [ "▁require", -9.831945419311523 ], [ "30", -9.83255386352539 ], [ "▁planning", -9.833393096923828 ], [ "▁mal", -9.834482192993164 ], [ "▁directly", -9.835214614868164 ], [ "ure", -9.835719108581543 ], [ "▁track", -9.835734367370605 ], [ "▁tool", -9.836135864257812 ], [ "▁positive", -9.836392402648926 ], [ "▁piece", -9.837076187133789 ], [ "▁parts", -9.837140083312988 ], [ "ang", -9.83740520477295 ], [ "▁trip", -9.837453842163086 ], [ "▁organization", -9.837935447692871 ], [ "▁sites", -9.838274002075195 ], [ "▁fire", -9.83831787109375 ], [ "▁China", -9.838876724243164 ], [ "▁Pour", -9.839289665222168 ], [ "▁plant", -9.84011459350586 ], [ "▁board", -9.840341567993164 ], [ "▁interesting", -9.841227531433105 ], [ "gar", -9.841713905334473 ], [ "▁fie", -9.841752052307129 ], [ "▁late", -9.842166900634766 ], [ "▁wall", -9.842294692993164 ], [ "▁walk", -9.842741966247559 ], [ "ham", -9.843868255615234 ], [ "▁Ne", -9.845427513122559 ], [ "▁First", -9.845462799072266 ], [ "▁double", -9.845701217651367 ], [ "▁budget", -9.847657203674316 ], [ "▁cases", -9.847670555114746 ], [ "cal", -9.849738121032715 ], [ "old", -9.849796295166016 ], [ "▁Bo", -9.849822998046875 ], [ "▁spend", -9.850439071655273 ], [ "port", -9.850828170776367 ], [ "▁worth", -9.850934028625488 ], [ "ique", -9.851308822631836 ], [ "nes", -9.85190486907959 ], [ "cul", -9.852272033691406 ], [ "era", -9.85296630859375 ], [ "▁text", -9.853032112121582 ], [ "▁decided", -9.854948997497559 ], [ "▁floor", -9.855036735534668 ], [ "▁requirements", -9.85529899597168 ], [ "▁cel", -9.855361938476562 ], [ "▁effect", -9.855412483215332 ], [ "▁gibt", -9.856159210205078 ], [ "▁news", -9.859238624572754 ], [ "▁vos", -9.859931945800781 ], [ "▁players", -9.86057186126709 ], [ "▁saw", -9.862728118896484 ], [ "▁auto", -9.863056182861328 ], [ "▁town", -9.863207817077637 ], [ "▁myself", -9.864106178283691 ], [ "▁lost", -9.864988327026367 ], [ "▁$", -9.865124702453613 ], [ "▁June", -9.86609172821045 ], [ "▁significant", -9.866196632385254 ], [ "▁giving", -9.866230010986328 ], [ "▁stand", -9.866744041442871 ], [ "▁stock", 
-9.867657661437988 ], [ "▁hold", -9.867766380310059 ], [ "▁Are", -9.869078636169434 ], [ "▁shall", -9.86923599243164 ], [ "▁ideal", -9.869279861450195 ], [ "▁London", -9.87080192565918 ], [ "▁answer", -9.870853424072266 ], [ "▁Vor", -9.87157917022705 ], [ "▁gives", -9.873115539550781 ], [ "ative", -9.87316608428955 ], [ "▁timp", -9.873167991638184 ], [ "▁center", -9.87362289428711 ], [ "▁Group", -9.874580383300781 ], [ "▁sans", -9.875143051147461 ], [ "▁Ar", -9.875466346740723 ], [ "▁Ma", -9.875568389892578 ], [ "▁reach", -9.876279830932617 ], [ "ren", -9.876652717590332 ], [ "▁More", -9.877446174621582 ], [ "mit", -9.878068923950195 ], [ "▁guide", -9.87833309173584 ], [ "▁fully", -9.878828048706055 ], [ "▁Since", -9.878952980041504 ], [ "▁Inc", -9.87923812866211 ], [ "▁culture", -9.879780769348145 ], [ "eat", -9.880531311035156 ], [ "▁written", -9.880722999572754 ], [ "▁Ho", -9.881338119506836 ], [ "▁India", -9.881625175476074 ], [ "▁Well", -9.881708145141602 ], [ "back", -9.881752967834473 ], [ "▁goes", -9.882170677185059 ], [ "▁completely", -9.88217544555664 ], [ "▁tour", -9.883081436157227 ], [ "▁began", -9.883196830749512 ], [ "▁picture", -9.883255958557129 ], [ "▁mare", -9.88353157043457 ], [ "▁playing", -9.884223937988281 ], [ "▁trebuie", -9.884926795959473 ], [ "ils", -9.884940147399902 ], [ "chen", -9.885220527648926 ], [ "▁hit", -9.885416984558105 ], [ "▁complex", -9.88591480255127 ], [ "▁Thank", -9.886140823364258 ], [ "▁Let", -9.886350631713867 ], [ "▁applications", -9.887116432189941 ], [ "▁friend", -9.888312339782715 ], [ "▁English", -9.889549255371094 ], [ "▁charge", -9.890040397644043 ], [ "▁recommend", -9.893453598022461 ], [ "▁message", -9.893672943115234 ], [ "In", -9.893722534179688 ], [ "▁Mar", -9.894762992858887 ], [ "pp", -9.895845413208008 ], [ "▁method", -9.89692497253418 ], [ "▁successful", -9.897004127502441 ], [ "tion", -9.898880958557129 ], [ "▁release", -9.899920463562012 ], [ "▁creating", -9.900403022766113 ], [ "▁despre", -9.90141773223877 ], [ "esc", -9.902434349060059 ], [ "▁eye", -9.902752876281738 ], [ "▁apply", -9.905945777893066 ], [ "net", -9.906000137329102 ], [ "side", -9.906539916992188 ], [ "▁ar", -9.906949996948242 ], [ "▁platform", -9.90713882446289 ], [ "▁touch", -9.907329559326172 ], [ "▁towards", -9.90785026550293 ], [ "▁match", -9.908224105834961 ], [ "▁Black", -9.909344673156738 ], [ "▁fall", -9.90961742401123 ], [ "▁ground", -9.910234451293945 ], [ "▁High", -9.910740852355957 ], [ "▁Q", -9.911155700683594 ], [ "▁schon", -9.911709785461426 ], [ "▁hotel", -9.911751747131348 ], [ "▁prices", -9.912031173706055 ], [ "▁developed", -9.913411140441895 ], [ "uk", -9.913476943969727 ], [ "ide", -9.91367244720459 ], [ "▁September", -9.91370964050293 ], [ "ized", -9.914202690124512 ], [ "▁War", -9.914704322814941 ], [ "!!", -9.916285514831543 ], [ "▁grow", -9.916997909545898 ], [ "▁watch", -9.917067527770996 ], [ "▁storage", -9.917412757873535 ], [ "eau", -9.917513847351074 ], [ "can", -9.918373107910156 ], [ "▁Get", -9.919524192810059 ], [ "▁See", -9.91953182220459 ], [ "▁European", -9.919703483581543 ], [ "▁language", -9.91982650756836 ], [ "ează", -9.920175552368164 ], [ "▁court", -9.920334815979004 ], [ "▁Why", -9.921106338500977 ], [ "▁hear", -9.921342849731445 ], [ "▁doar", -9.921804428100586 ], [ "lan", -9.92330265045166 ], [ "▁Christmas", -9.923810958862305 ], [ "▁Web", -9.923871994018555 ], [ "vo", -9.92405891418457 ], [ "▁sent", -9.924983024597168 ], [ "▁businesses", -9.925868034362793 ], [ "▁Red", -9.926278114318848 ], [ "tel", 
-9.926375389099121 ], [ "▁Ha", -9.926508903503418 ], [ "▁wonderful", -9.926653861999512 ], [ "ations", -9.926738739013672 ], [ "za", -9.92748737335205 ], [ "▁22", -9.928659439086914 ], [ "▁thinking", -9.92941665649414 ], [ "▁became", -9.929733276367188 ], [ "▁cool", -9.929835319519043 ], [ "▁speed", -9.930370330810547 ], [ "mar", -9.930426597595215 ], [ "▁--", -9.931743621826172 ], [ "▁groups", -9.931920051574707 ], [ "▁interested", -9.93198299407959 ], [ "ak", -9.93218994140625 ], [ "▁60", -9.932672500610352 ], [ "▁screen", -9.93370246887207 ], [ "▁Design", -9.933789253234863 ], [ "▁limited", -9.935648918151855 ], [ "▁expected", -9.935959815979004 ], [ "▁opportunities", -9.936376571655273 ], [ "▁regular", -9.936870574951172 ], [ "off", -9.93702220916748 ], [ "▁Best", -9.937298774719238 ], [ "Re", -9.938436508178711 ], [ "▁ihr", -9.938719749450684 ], [ "▁Great", -9.938907623291016 ], [ "▁employees", -9.93924617767334 ], [ "▁custom", -9.939679145812988 ], [ "▁multe", -9.940123558044434 ], [ "let", -9.940876007080078 ], [ "▁benefit", -9.942487716674805 ], [ "▁term", -9.942623138427734 ], [ "▁bine", -9.942869186401367 ], [ "▁deep", -9.944526672363281 ], [ "▁August", -9.94526481628418 ], [ "▁President", -9.945381164550781 ], [ "▁Auf", -9.945854187011719 ], [ "▁wish", -9.946924209594727 ], [ "▁sometimes", -9.947274208068848 ], [ "ari", -9.947793960571289 ], [ "▁pressure", -9.948184967041016 ], [ "▁ani", -9.94859504699707 ], [ "▁trade", -9.949930191040039 ], [ "▁firm", -9.950027465820312 ], [ "▁comment", -9.95003604888916 ], [ "▁November", -9.950242042541504 ], [ "▁expect", -9.951102256774902 ], [ "▁2012", -9.952491760253906 ], [ "▁Ich", -9.95328140258789 ], [ "▁relationship", -9.95363998413086 ], [ "▁active", -9.954682350158691 ], [ "org", -9.954710960388184 ], [ "▁heat", -9.956732749938965 ], [ "▁wood", -9.95678997039795 ], [ "▁notre", -9.957921028137207 ], [ "▁function", -9.958330154418945 ], [ "▁2.", -9.95909309387207 ], [ "▁wedding", -9.960049629211426 ], [ "▁starting", -9.961235046386719 ], [ "▁Health", -9.961249351501465 ], [ "\",", -9.961713790893555 ], [ "▁death", -9.962173461914062 ], [ "▁pages", -9.962764739990234 ], [ "▁vehicle", -9.96293830871582 ], [ "▁request", -9.963874816894531 ], [ "▁helps", -9.963916778564453 ], [ "▁blue", -9.964017868041992 ], [ "▁analysis", -9.964414596557617 ], [ "▁posted", -9.964544296264648 ], [ "▁healthy", -9.964814186096191 ], [ "▁contract", -9.964988708496094 ], [ "▁•", -9.965263366699219 ], [ "▁Each", -9.965293884277344 ], [ "▁Fa", -9.966179847717285 ], [ "▁dintre", -9.966221809387207 ], [ "▁Friday", -9.967202186584473 ], [ "▁considered", -9.967992782592773 ], [ "cher", -9.96826457977295 ], [ "▁quick", -9.968731880187988 ], [ "▁understanding", -9.96916389465332 ], [ "▁condition", -9.969378471374512 ], [ "ization", -9.971049308776855 ], [ "▁document", -9.971664428710938 ], [ "▁prevent", -9.971890449523926 ], [ "▁growing", -9.9725341796875 ], [ "▁protection", -9.972620964050293 ], [ "▁cat", -9.974002838134766 ], [ "▁ -9.975058555603027 ], [ "10", -9.975275039672852 ], [ "▁join", -9.9759521484375 ], [ "▁serve", -9.976580619812012 ], [ "▁blood", -9.977095603942871 ], [ "▁July", -9.977341651916504 ], [ "▁region", -9.977787971496582 ], [ "car", -9.97933578491211 ], [ "▁entre", -9.979788780212402 ], [ "▁physical", -9.981287002563477 ], [ "▁cash", -9.9813232421875 ], [ "aux", -9.981823921203613 ], [ "ng", -9.982654571533203 ], [ "▁stage", -9.98281478881836 ], [ "▁seem", -9.983034133911133 ], [ "▁definitely", -9.983795166015625 ], [ "▁investment", 
-9.983827590942383 ], [ "▁purpose", -9.985441207885742 ], [ "▁begin", -9.985486030578613 ], [ "®", -9.985495567321777 ], [ "▁break", -9.985701560974121 ], [ "itate", -9.987293243408203 ], [ "▁moving", -9.989288330078125 ], [ "▁met", -9.990678787231445 ], [ "ize", -9.990833282470703 ], [ "▁select", -9.991165161132812 ], [ "▁tous", -9.991310119628906 ], [ "▁Europe", -9.991639137268066 ], [ "@", -9.992724418640137 ], [ "▁individuals", -9.993392944335938 ], [ "▁Zeit", -9.993524551391602 ], [ "gu", -9.995670318603516 ], [ "▁unit", -9.995753288269043 ], [ "▁noi", -9.996089935302734 ], [ "▁places", -9.996171951293945 ], [ "all", -9.99632453918457 ], [ "▁wait", -9.996755599975586 ], [ "▁difference", -9.997234344482422 ], [ "▁round", -9.998015403747559 ], [ "50", -9.99953842163086 ], [ "rie", -9.999545097351074 ], [ "▁Et", -9.999933242797852 ], [ "20", -10.000725746154785 ], [ "▁activity", -10.000792503356934 ], [ "е", -10.000866889953613 ], [ "▁Windows", -10.001087188720703 ], [ "▁produce", -10.001385688781738 ], [ "▁keine", -10.00212574005127 ], [ "▁Air", -10.002567291259766 ], [ "▁January", -10.004890441894531 ], [ "▁deux", -10.005081176757812 ], [ "▁entry", -10.005208015441895 ], [ "king", -10.006500244140625 ], [ "▁goals", -10.006736755371094 ], [ "▁previous", -10.0077543258667 ], [ "▁+", -10.008035659790039 ], [ "▁Business", -10.008259773254395 ], [ "ont", -10.008552551269531 ], [ "▁Sunday", -10.008694648742676 ], [ "▁offering", -10.010359764099121 ], [ "▁response", -10.011018753051758 ], [ "▁surface", -10.011393547058105 ], [ "▁Department", -10.01212215423584 ], [ "▁exactly", -10.012190818786621 ], [ "▁Online", -10.012577056884766 ], [ "dem", -10.013803482055664 ], [ "ischen", -10.014006614685059 ], [ "▁hands", -10.015100479125977 ], [ "▁hour", -10.016197204589844 ], [ "▁dog", -10.016946792602539 ], [ "▁damage", -10.017006874084473 ], [ "▁capital", -10.018792152404785 ], [ "▁toate", -10.020488739013672 ], [ "▁wrong", -10.020674705505371 ], [ "unui", -10.022201538085938 ], [ "tri", -10.023979187011719 ], [ "▁sell", -10.023999214172363 ], [ "▁published", -10.024175643920898 ], [ "▁families", -10.024675369262695 ], [ "▁avoid", -10.025490760803223 ], [ "▁Ko", -10.025506019592285 ], [ "▁mod", -10.026697158813477 ], [ "rat", -10.027653694152832 ], [ "▁Make", -10.0299654006958 ], [ "▁October", -10.030153274536133 ], [ "▁former", -10.031285285949707 ], [ "▁Services", -10.03281021118164 ], [ "▁felt", -10.033045768737793 ], [ "▁selection", -10.033309936523438 ], [ "eaza", -10.034177780151367 ], [ "gel", -10.034422874450684 ], [ "▁Good", -10.035792350769043 ], [ "▁actual", -10.0364351272583 ], [ "▁gut", -10.036853790283203 ], [ "▁gas", -10.03708553314209 ], [ "15", -10.038182258605957 ], [ "▁structure", -10.038285255432129 ], [ "▁act", -10.0386381149292 ], [ "▁Zu", -10.038654327392578 ], [ "▁creative", -10.039134979248047 ], [ "▁Vi", -10.039159774780273 ], [ "▁shop", -10.04066276550293 ], [ "▁Lo", -10.040735244750977 ], [ "şi", -10.042192459106445 ], [ "▁mis", -10.042224884033203 ], [ "ungen", -10.042301177978516 ], [ "▁fan", -10.04240608215332 ], [ "▁|", -10.043391227722168 ], [ "▁Bei", -10.044037818908691 ], [ "▁protect", -10.04454517364502 ], [ "▁Na", -10.0447998046875 ], [ "q", -10.045693397521973 ], [ "ok", -10.04710578918457 ], [ "▁California", -10.047263145446777 ], [ "▁political", -10.047301292419434 ], [ "25", -10.047530174255371 ], [ "▁feeling", -10.047913551330566 ], [ "▁ces", -10.048321723937988 ], [ "▁display", -10.048857688903809 ], [ "▁essential", -10.04964542388916 ], [ "ând", 
-10.049971580505371 ], [ "▁seine", -10.050551414489746 ], [ "▁soft", -10.050915718078613 ], [ "ach", -10.05102252960205 ], [ "▁happen", -10.051118850708008 ], [ "▁Paul", -10.053346633911133 ], [ "▁Cu", -10.054024696350098 ], [ "house", -10.055376052856445 ], [ "ante", -10.05582046508789 ], [ "▁easier", -10.056551933288574 ], [ "▁sort", -10.0567045211792 ], [ "▁Post", -10.057138442993164 ], [ "▁accept", -10.05730152130127 ], [ "field", -10.057648658752441 ], [ "zen", -10.057741165161133 ], [ "▁character", -10.057848930358887 ], [ "▁beginning", -10.058433532714844 ], [ "▁Jesus", -10.058760643005371 ], [ "▁weekend", -10.059663772583008 ], [ "▁certainly", -10.06114387512207 ], [ "▁THE", -10.061254501342773 ], [ "▁alle", -10.06189250946045 ], [ "▁transport", -10.062220573425293 ], [ "▁Saturday", -10.063043594360352 ], [ "▁basic", -10.064136505126953 ], [ "▁loved", -10.06431770324707 ], [ "ros", -10.065333366394043 ], [ "▁offered", -10.065996170043945 ], [ "▁camera", -10.067024230957031 ], [ "▁Green", -10.06789779663086 ], [ "ology", -10.069480895996094 ], [ "ä", -10.069646835327148 ], [ "▁manage", -10.070416450500488 ], [ "▁paid", -10.070881843566895 ], [ "▁advice", -10.071617126464844 ], [ "▁patient", -10.072234153747559 ], [ "▁spent", -10.072272300720215 ], [ "▁mir", -10.072366714477539 ], [ "▁baby", -10.072400093078613 ], [ "ö", -10.073193550109863 ], [ "▁basis", -10.073338508605957 ], [ "▁cancer", -10.073765754699707 ], [ "▁Although", -10.07400894165039 ], [ "▁gift", -10.074336051940918 ], [ "▁3.", -10.074871063232422 ], [ "dieser", -10.075157165527344 ], [ "▁overall", -10.07520580291748 ], [ "▁Sch", -10.075265884399414 ], [ "▁Ex", -10.076258659362793 ], [ "▁December", -10.07689094543457 ], [ "▁released", -10.078214645385742 ], [ "▁prior", -10.07900333404541 ], [ "▁sowie", -10.081072807312012 ], [ "▁club", -10.081326484680176 ], [ "▁Street", -10.081535339355469 ], [ "▁College", -10.08254623413086 ], [ "▁î", -10.083059310913086 ], [ "over", -10.083159446716309 ], [ "▁gave", -10.08454704284668 ], [ "▁truly", -10.084784507751465 ], [ "par", -10.084806442260742 ], [ "▁Canada", -10.084888458251953 ], [ "▁existing", -10.085420608520508 ], [ "lie", -10.086335182189941 ], [ "▁ganz", -10.086658477783203 ], [ "▁setting", -10.087109565734863 ], [ "▁supply", -10.08739185333252 ], [ "▁college", -10.087540626525879 ], [ "▁communication", -10.088407516479492 ], [ "▁23", -10.088834762573242 ], [ "▁pass", -10.091546058654785 ], [ "▁devices", -10.091872215270996 ], [ "▁glass", -10.092083930969238 ], [ "▁experienced", -10.092395782470703 ], [ "▁grand", -10.093363761901855 ], [ "▁Po", -10.093396186828613 ], [ "▁beyond", -10.094029426574707 ], [ "▁format", -10.094165802001953 ], [ "▁mon", -10.09461498260498 ], [ "▁perform", -10.094635009765625 ], [ "sten", -10.095130920410156 ], [ "▁1,", -10.096270561218262 ], [ "▁Per", -10.096640586853027 ], [ "▁sold", -10.097247123718262 ], [ "▁rates", -10.0972900390625 ], [ "▁regarding", -10.097782135009766 ], [ "▁Paris", -10.098291397094727 ], [ "▁Dar", -10.099579811096191 ], [ "▁challenge", -10.099649429321289 ], [ "▁feet", -10.100564002990723 ], [ "▁Su", -10.102017402648926 ], [ "je", -10.102593421936035 ], [ "▁Bank", -10.102627754211426 ], [ "ven", -10.103126525878906 ], [ "jo", -10.103290557861328 ], [ "▁band", -10.10348892211914 ], [ "▁delivery", -10.104915618896484 ], [ "Vous", -10.104924201965332 ], [ "tele", -10.10495376586914 ], [ "▁East", -10.105379104614258 ], [ "▁pictures", -10.106067657470703 ], [ "▁useful", -10.106481552124023 ], [ "*", -10.107648849487305 ], 
[ "▁increased", -10.107746124267578 ], [ "▁stories", -10.108119010925293 ], [ "sion", -10.108280181884766 ], [ "bra", -10.108345985412598 ], [ "▁brought", -10.108466148376465 ], [ "▁effort", -10.109898567199707 ], [ "▁payment", -10.11058235168457 ], [ "▁heard", -10.110925674438477 ], [ "▁played", -10.111245155334473 ], [ "▁White", -10.111417770385742 ], [ "▁metal", -10.111721992492676 ], [ "tal", -10.111754417419434 ], [ "▁engine", -10.112006187438965 ], [ "▁Club", -10.11218547821045 ], [ "ical", -10.114581108093262 ], [ "▁effects", -10.115421295166016 ], [ "▁degree", -10.115763664245605 ], [ "▁bed", -10.1159086227417 ], [ "ette", -10.115991592407227 ], [ "▁David", -10.116386413574219 ], [ "°", -10.117666244506836 ], [ "▁Au", -10.117938041687012 ], [ "▁Company", -10.11845874786377 ], [ "▁player", -10.11938190460205 ], [ "▁Today", -10.120569229125977 ], [ "▁maintain", -10.12093448638916 ], [ "▁minute", -10.121193885803223 ], [ "mail", -10.122172355651855 ], [ "▁race", -10.122366905212402 ], [ "▁comfortable", -10.123887062072754 ], [ "▁responsible", -10.124085426330566 ], [ "vor", -10.124622344970703 ], [ "▁associated", -10.124695777893066 ], [ "▁weather", -10.124701499938965 ], [ "▁$1", -10.125639915466309 ], [ "▁tried", -10.126176834106445 ], [ "▁Check", -10.127649307250977 ], [ "▁solid", -10.127864837646484 ], [ "▁movie", -10.128364562988281 ], [ "▁coffee", -10.12874698638916 ], [ "board", -10.129073143005371 ], [ "▁po", -10.12946605682373 ], [ "▁warm", -10.129583358764648 ], [ "▁connect", -10.131733894348145 ], [ "▁Ad", -10.133807182312012 ], [ "work", -10.133859634399414 ], [ "mal", -10.13397216796875 ], [ "▁Act", -10.134634971618652 ], [ "▁achieve", -10.134769439697266 ], [ "▁Nach", -10.136604309082031 ], [ "www", -10.136669158935547 ], [ "term", -10.13672161102295 ], [ "▁claim", -10.137251853942871 ], [ "▁particularly", -10.138245582580566 ], [ "▁cas", -10.138396263122559 ], [ "▁furniture", -10.138461112976074 ], [ "▁finish", -10.13896369934082 ], [ "▁temps", -10.139026641845703 ], [ "▁disease", -10.139115333557129 ], [ "▁lots", -10.139196395874023 ], [ "▁ball", -10.139307975769043 ], [ "▁sun", -10.14010238647461 ], [ "▁strategy", -10.140498161315918 ], [ "bre", -10.140518188476562 ], [ "▁mine", -10.141541481018066 ], [ "▁Click", -10.141743659973145 ], [ "ran", -10.141983032226562 ], [ "▁Will", -10.142234802246094 ], [ "▁garden", -10.142974853515625 ], [ "▁stuff", -10.14359188079834 ], [ "▁limit", -10.144641876220703 ], [ "▁bottom", -10.14494800567627 ], [ "▁shown", -10.144962310791016 ], [ "ship", -10.145271301269531 ], [ "▁habe", -10.145858764648438 ], [ "▁Super", -10.146219253540039 ], [ "▁completed", -10.146971702575684 ], [ "▁wine", -10.146979331970215 ], [ "ische", -10.147262573242188 ], [ "▁largest", -10.147466659545898 ], [ "▁appropriate", -10.148261070251465 ], [ "▁immediately", -10.150248527526855 ], [ "▁Hi", -10.152358055114746 ], [ "▁trust", -10.152767181396484 ], [ "ability", -10.154254913330078 ], [ "▁powerful", -10.155101776123047 ], [ "▁helping", -10.155620574951172 ], [ "▁schedule", -10.155688285827637 ], [ "▁correct", -10.155707359313965 ], [ "▁transfer", -10.156496047973633 ], [ "pre", -10.15665340423584 ], [ "▁journey", -10.15688419342041 ], [ "pm", -10.157002449035645 ], [ "don", -10.158435821533203 ], [ "▁highest", -10.159249305725098 ], [ "▁finally", -10.15999698638916 ], [ "form", -10.160258293151855 ], [ "▁extremely", -10.160404205322266 ], [ "▁window", -10.160501480102539 ], [ "▁Over", -10.162222862243652 ], [ "▁remove", -10.162469863891602 ], [ "wood", 
-10.162479400634766 ], [ "▁2013", -10.163631439208984 ], [ "▁mother", -10.164072036743164 ], [ "▁Auto", -10.16436767578125 ], [ "▁annual", -10.164615631103516 ], [ "▁Star", -10.164834976196289 ], [ "▁Di", -10.166138648986816 ], [ "о", -10.16711139678955 ], [ "▁gold", -10.167129516601562 ], [ "tar", -10.167352676391602 ], [ "ju", -10.167750358581543 ], [ "▁Use", -10.169474601745605 ], [ "▁thanks", -10.16960334777832 ], [ "▁centre", -10.170127868652344 ], [ "▁Australia", -10.170358657836914 ], [ "▁estate", -10.170504570007324 ], [ "▁eyes", -10.1714448928833 ], [ "▁force", -10.171592712402344 ], [ "▁income", -10.17395305633545 ], [ "▁science", -10.174036026000977 ], [ "ori", -10.174230575561523 ], [ "▁enter", -10.174851417541504 ], [ "▁28", -10.175408363342285 ], [ "ire", -10.17568302154541 ], [ "▁schools", -10.175797462463379 ], [ "▁restaurant", -10.176088333129883 ], [ "▁Council", -10.177032470703125 ], [ "aus", -10.177885055541992 ], [ "▁agree", -10.17905330657959 ], [ "▁campaign", -10.179192543029785 ], [ "▁Ta", -10.179428100585938 ], [ "▁letter", -10.179814338684082 ], [ "▁central", -10.179931640625 ], [ "▁Because", -10.180054664611816 ], [ "▁path", -10.180349349975586 ], [ "▁loc", -10.180882453918457 ], [ "▁files", -10.182587623596191 ], [ "▁population", -10.182705879211426 ], [ "▁explore", -10.182723999023438 ], [ "▁mid", -10.182734489440918 ], [ "▁concept", -10.182748794555664 ], [ "▁church", -10.183015823364258 ], [ "80", -10.183026313781738 ], [ "▁einfach", -10.185834884643555 ], [ "▁reasons", -10.186690330505371 ], [ "▁determine", -10.186755180358887 ], [ "▁February", -10.187095642089844 ], [ "▁evidence", -10.18797779083252 ], [ "▁sleep", -10.188036918640137 ], [ "▁Board", -10.188652992248535 ], [ "▁maybe", -10.189635276794434 ], [ "▁wasn", -10.189701080322266 ], [ "▁Monday", -10.190101623535156 ], [ "▁director", -10.190481185913086 ], [ "well", -10.190974235534668 ], [ "During", -10.191001892089844 ], [ "▁sweet", -10.191061973571777 ], [ "▁assist", -10.19124984741211 ], [ "▁police", -10.191511154174805 ], [ "▁repair", -10.191729545593262 ], [ "▁techniques", -10.191733360290527 ], [ "▁served", -10.191808700561523 ], [ "vi", -10.192037582397461 ], [ "▁sports", -10.192331314086914 ], [ "▁opening", -10.192401885986328 ], [ "▁ones", -10.192731857299805 ], [ "▁notice", -10.193460464477539 ], [ "▁PC", -10.193547248840332 ], [ "▁alte", -10.194242477416992 ], [ "▁Bi", -10.194340705871582 ], [ "▁cold", -10.195606231689453 ], [ "▁billion", -10.195794105529785 ], [ "▁balance", -10.196361541748047 ], [ "cer", -10.196417808532715 ], [ "▁nearly", -10.196725845336914 ], [ "▁wear", -10.197259902954102 ], [ "free", -10.19760799407959 ], [ "▁Have", -10.197748184204102 ], [ "▁comfort", -10.199211120605469 ], [ "▁studies", -10.199225425720215 ], [ "▁traffic", -10.199540138244629 ], [ "▁item", -10.200214385986328 ], [ "▁teaching", -10.200467109680176 ], [ "▁turned", -10.201326370239258 ], [ "isation", -10.201354026794434 ], [ "12", -10.202038764953613 ], [ "▁greater", -10.202167510986328 ], [ "▁knew", -10.20233154296875 ], [ "▁Association", -10.203333854675293 ], [ "▁Office", -10.203802108764648 ], [ "▁established", -10.204085350036621 ], [ "45", -10.204170227050781 ], [ "▁Love", -10.204318046569824 ], [ "▁changed", -10.204882621765137 ], [ "▁pan", -10.205184936523438 ], [ "van", -10.20565414428711 ], [ "▁Mi", -10.205663681030273 ], [ "▁tend", -10.20637321472168 ], [ "▁connection", -10.206522941589355 ], [ "▁lack", -10.206954002380371 ], [ "▁bank", -10.208464622497559 ], [ "cat", -10.208720207214355 ], 
[ "▁helped", -10.209071159362793 ], [ "▁spot", -10.209417343139648 ], [ "▁spring", -10.20974063873291 ], [ "▁Wi", -10.210912704467773 ], [ "▁Mac", -10.211682319641113 ], [ "▁Christ", -10.212015151977539 ], [ "▁saying", -10.212835311889648 ], [ "▁General", -10.213062286376953 ], [ "▁port", -10.213099479675293 ], [ "▁Mal", -10.213156700134277 ], [ "▁System", -10.213486671447754 ], [ "▁According", -10.2152738571167 ], [ "▁chiar", -10.21568489074707 ], [ "log", -10.21576976776123 ], [ "▁mix", -10.215974807739258 ], [ "▁Lake", -10.216042518615723 ], [ "▁intr", -10.216590881347656 ], [ "▁deliver", -10.216793060302734 ], [ "mon", -10.216931343078613 ], [ "▁Ro", -10.217060089111328 ], [ "▁Management", -10.217504501342773 ], [ "bri", -10.218718528747559 ], [ "▁pieces", -10.218774795532227 ], [ "▁announced", -10.218926429748535 ], [ "▁Yes", -10.219268798828125 ], [ "▁dark", -10.220884323120117 ], [ "val", -10.221765518188477 ], [ "▁rights", -10.22309684753418 ], [ "▁Diese", -10.223100662231445 ], [ "ki", -10.223350524902344 ], [ "vent", -10.22375774383545 ], [ "▁born", -10.22380542755127 ], [ "▁muss", -10.224031448364258 ], [ "compared", -10.224660873413086 ], [ "▁demand", -10.224669456481934 ], [ "▁handle", -10.225493431091309 ], [ "▁mode", -10.226058006286621 ], [ "lic", -10.226137161254883 ], [ "▁ahead", -10.226436614990234 ], [ "▁sharing", -10.227599143981934 ], [ "▁micro", -10.227779388427734 ], [ "▁Par", -10.228626251220703 ], [ "▁Every", -10.22950553894043 ], [ "▁bag", -10.229736328125 ], [ "▁daca", -10.22974967956543 ], [ "▁Apple", -10.23022174835205 ], [ "▁Mark", -10.230239868164062 ], [ "▁larger", -10.231284141540527 ], [ "eze", -10.231978416442871 ], [ "▁progress", -10.232234001159668 ], [ "▁stress", -10.232929229736328 ], [ "▁cards", -10.233663558959961 ], [ "▁driving", -10.233738899230957 ], [ "▁dry", -10.233970642089844 ], [ "▁relevant", -10.234556198120117 ], [ "▁Jo", -10.234825134277344 ], [ "▁tree", -10.235036849975586 ], [ "▁reported", -10.235770225524902 ], [ "ities", -10.23577880859375 ], [ "▁tea", -10.235806465148926 ], [ "▁although", -10.236145973205566 ], [ "▁Research", -10.236261367797852 ], [ "▁pool", -10.23691463470459 ], [ "▁fin", -10.237163543701172 ], [ "▁Und", -10.238130569458008 ], [ "▁decide", -10.239217758178711 ], [ "▁expert", -10.239344596862793 ], [ "rate", -10.239428520202637 ], [ "zeit", -10.239971160888672 ], [ "▁26", -10.24040412902832 ], [ "▁Ka", -10.24056339263916 ], [ "▁fix", -10.240666389465332 ], [ "igen", -10.240713119506836 ], [ "▁direction", -10.241188049316406 ], [ "▁star", -10.241661071777344 ], [ "▁middle", -10.241889953613281 ], [ "▁Ja", -10.241962432861328 ], [ "▁Land", -10.24207878112793 ], [ "ken", -10.242605209350586 ], [ "▁button", -10.242630004882812 ], [ "▁rules", -10.242656707763672 ], [ "▁également", -10.242706298828125 ], [ "▁viel", -10.243158340454102 ], [ "▁welcome", -10.243682861328125 ], [ "că", -10.243932723999023 ], [ "▁Top", -10.245308876037598 ], [ "▁allowed", -10.245487213134766 ], [ "▁tip", -10.245584487915039 ], [ "▁cei", -10.245768547058105 ], [ "▁Nous", -10.246004104614258 ], [ "té", -10.246850967407227 ], [ "▁unei", -10.246903419494629 ], [ "▁efforts", -10.247260093688965 ], [ "▁note", -10.247719764709473 ], [ "▁title", -10.247977256774902 ], [ "ric", -10.248047828674316 ], [ "berg", -10.248252868652344 ], [ "▁ainsi", -10.248576164245605 ], [ "▁led", -10.248713493347168 ], [ "▁alone", -10.248786926269531 ], [ "ward", -10.249215126037598 ], [ "▁vie", -10.249323844909668 ], [ "▁brain", -10.249427795410156 ], [ "light", 
-10.250100135803223 ], [ "▁Court", -10.250598907470703 ], [ "set", -10.250869750976562 ], [ "▁steps", -10.251251220703125 ], [ "pri", -10.251391410827637 ], [ "Q", -10.251654624938965 ], [ "sti", -10.251938819885254 ], [ "▁voice", -10.252121925354004 ], [ "▁models", -10.252705574035645 ], [ "▁parties", -10.25442886352539 ], [ "▁radio", -10.255270957946777 ], [ "▁mission", -10.25545883178711 ], [ "▁methods", -10.255658149719238 ], [ "▁Te", -10.256019592285156 ], [ "air", -10.256489753723145 ], [ "▁essay", -10.256719589233398 ], [ "my", -10.256826400756836 ], [ "▁competition", -10.257049560546875 ], [ "ses", -10.257447242736816 ], [ "▁serious", -10.258724212646484 ], [ "▁Ti", -10.258733749389648 ], [ "▁Hand", -10.259561538696289 ], [ "not", -10.25958251953125 ], [ "▁winter", -10.261277198791504 ], [ "24", -10.261724472045898 ], [ "▁vision", -10.26174545288086 ], [ "▁technical", -10.262110710144043 ], [ "▁cross", -10.262799263000488 ], [ "▁update", -10.262947082519531 ], [ "▁Team", -10.263564109802246 ], [ "▁evening", -10.264286041259766 ], [ "▁experts", -10.26435661315918 ], [ "part", -10.264640808105469 ], [ "▁wo", -10.265190124511719 ], [ "▁App", -10.265729904174805 ], [ "▁peu", -10.266267776489258 ], [ "▁mich", -10.26630687713623 ], [ "▁reports", -10.267001152038574 ], [ "▁km", -10.267594337463379 ], [ "▁print", -10.2678804397583 ], [ "▁Hotel", -10.268101692199707 ], [ "▁earlier", -10.268235206604004 ], [ "▁uses", -10.26826286315918 ], [ "▁menu", -10.268416404724121 ], [ "▁miles", -10.26845645904541 ], [ "▁classes", -10.268463134765625 ], [ "▁mo", -10.268525123596191 ], [ "▁loan", -10.2691011428833 ], [ "▁host", -10.269192695617676 ], [ "▁author", -10.269274711608887 ], [ "-1", -10.269434928894043 ], [ "▁bun", -10.269940376281738 ], [ "19", -10.270011901855469 ], [ "uch", -10.270670890808105 ], [ "ble", -10.270813941955566 ], [ "▁holiday", -10.270859718322754 ], [ "los", -10.271894454956055 ], [ "▁looked", -10.272663116455078 ], [ "▁Test", -10.272759437561035 ], [ "▁moved", -10.273000717163086 ], [ "▁numbers", -10.273306846618652 ], [ "▁covered", -10.273405075073242 ], [ "ker", -10.273696899414062 ], [ "TM", -10.273768424987793 ], [ "▁album", -10.274727821350098 ], [ "▁27", -10.27476692199707 ], [ "▁când", -10.27523422241211 ], [ "▁shopping", -10.275248527526855 ], [ "▁Ihr", -10.27531623840332 ], [ "▁requires", -10.275786399841309 ], [ "▁USA", -10.275909423828125 ], [ "000", -10.275951385498047 ], [ "▁official", -10.276010513305664 ], [ "▁states", -10.276346206665039 ], [ "▁tips", -10.276570320129395 ], [ "ible", -10.277321815490723 ], [ "▁Lu", -10.27756404876709 ], [ "ces", -10.278343200683594 ], [ "▁figure", -10.27839469909668 ], [ "▁Take", -10.278576850891113 ], [ "▁după", -10.278687477111816 ], [ "▁teams", -10.278980255126953 ], [ "▁song", -10.279138565063477 ], [ "▁master", -10.279386520385742 ], [ "ED", -10.279841423034668 ], [ "▁cleaning", -10.280523300170898 ], [ "▁drop", -10.280651092529297 ], [ "▁primary", -10.2808837890625 ], [ "▁Life", -10.28108024597168 ], [ "▁carry", -10.281129837036133 ], [ "▁initial", -10.281270980834961 ], [ "▁encore", -10.281617164611816 ], [ "▁Add", -10.281670570373535 ], [ "▁woman", -10.282076835632324 ], [ "▁Water", -10.282219886779785 ], [ "▁advantage", -10.28277587890625 ], [ "see", -10.283234596252441 ], [ "ré", -10.283341407775879 ], [ "▁motor", -10.283479690551758 ], [ "mel", -10.2838716506958 ], [ "▁finding", -10.284419059753418 ], [ "▁plastic", -10.286365509033203 ], [ "▁IT", -10.286602973937988 ], [ "▁Church", -10.286916732788086 ], [ 
"▁shape", -10.287345886230469 ], [ "▁gets", -10.287763595581055 ], [ "▁followed", -10.288186073303223 ], [ "▁100%", -10.288315773010254 ], [ "▁Program", -10.28912353515625 ], [ "▁Another", -10.28934383392334 ], [ "▁zwei", -10.289522171020508 ], [ "▁father", -10.289839744567871 ], [ "▁rich", -10.290282249450684 ], [ "où", -10.290810585021973 ], [ "▁lines", -10.290934562683105 ], [ "▁distance", -10.291757583618164 ], [ "▁cell", -10.291876792907715 ], [ "▁parte", -10.292072296142578 ], [ "bit", -10.292445182800293 ], [ "▁perhaps", -10.292749404907227 ], [ "rii", -10.293590545654297 ], [ "▁session", -10.294137954711914 ], [ "▁Pentru", -10.294528007507324 ], [ "ING", -10.295049667358398 ], [ "ants", -10.295478820800781 ], [ "▁remain", -10.295543670654297 ], [ "13", -10.295588493347168 ], [ "▁finished", -10.295763969421387 ], [ "bel", -10.298725128173828 ], [ "▁organizations", -10.299455642700195 ], [ "▁Any", -10.299896240234375 ], [ "▁taste", -10.300277709960938 ], [ "Whether", -10.300600051879883 ], [ "ram", -10.300874710083008 ], [ "like", -10.301307678222656 ], [ "▁artist", -10.301319122314453 ], [ "aire", -10.303369522094727 ], [ "▁French", -10.303386688232422 ], [ "▁donc", -10.303634643554688 ], [ "ow", -10.30386734008789 ], [ "▁200", -10.303993225097656 ], [ "▁paint", -10.304465293884277 ], [ "▁Open", -10.304535865783691 ], [ "▁appear", -10.304722785949707 ], [ "▁Washington", -10.304765701293945 ], [ "▁target", -10.30491828918457 ], [ "pir", -10.305578231811523 ], [ "▁generally", -10.305987358093262 ], [ "▁British", -10.306790351867676 ], [ "▁seven", -10.306937217712402 ], [ "▁bio", -10.307162284851074 ], [ "▁sector", -10.307358741760254 ], [ "90", -10.30777359008789 ], [ "▁fapt", -10.307881355285645 ], [ "▁prefer", -10.308316230773926 ], [ "▁partner", -10.308427810668945 ], [ "ăm", -10.308547973632812 ], [ "▁diverse", -10.308610916137695 ], [ "▁onto", -10.309283256530762 ], [ "▁refer", -10.309828758239746 ], [ "▁Law", -10.310302734375 ], [ "▁Ri", -10.310596466064453 ], [ "▁critical", -10.310735702514648 ], [ "▁copy", -10.310897827148438 ], [ "ck", -10.311517715454102 ], [ "ix", -10.311732292175293 ], [ "tag", -10.311793327331543 ], [ "▁Road", -10.311936378479004 ], [ "▁concern", -10.312053680419922 ], [ "▁maximum", -10.312095642089844 ], [ "▁train", -10.312148094177246 ], [ "▁într", -10.312189102172852 ], [ "ura", -10.313023567199707 ], [ "▁Qu", -10.313481330871582 ], [ "▁links", -10.313538551330566 ], [ "▁audience", -10.313969612121582 ], [ "▁foot", -10.314554214477539 ], [ "▁Blue", -10.314605712890625 ], [ "ification", -10.315386772155762 ], [ "▁developing", -10.315847396850586 ], [ "▁interior", -10.315876007080078 ], [ "=", -10.316556930541992 ], [ "▁aceasta", -10.31698989868164 ], [ "▁dedicated", -10.317373275756836 ], [ "▁movement", -10.317383766174316 ], [ "sta", -10.318868637084961 ], [ "▁challenges", -10.319018363952637 ], [ "inte", -10.319074630737305 ], [ "▁Euro", -10.319075584411621 ], [ "▁classic", -10.320341110229492 ], [ "▁Um", -10.320767402648926 ], [ "▁alternative", -10.321407318115234 ], [ "mann", -10.321614265441895 ], [ "▁Une", -10.322278022766113 ], [ "qu", -10.322415351867676 ], [ "▁heavy", -10.322434425354004 ], [ "▁install", -10.322484970092773 ], [ "▁fiind", -10.322504043579102 ], [ "▁leaders", -10.323003768920898 ], [ "▁views", -10.323019981384277 ], [ "▁www", -10.323084831237793 ], [ "▁standards", -10.323270797729492 ], [ "ong", -10.323580741882324 ], [ "40", -10.323833465576172 ], [ "▁cm", -10.323848724365234 ], [ "▁park", -10.324324607849121 ], [ "▁himself", 
-10.324419021606445 ], [ "▁People", -10.324649810791016 ], [ "▁separate", -10.324843406677246 ], [ "▁secure", -10.325018882751465 ], [ "sie", -10.325084686279297 ], [ "▁maintenance", -10.325199127197266 ], [ "▁encourage", -10.32766056060791 ], [ "ein", -10.328139305114746 ], [ "▁reviews", -10.328202247619629 ], [ "▁Michael", -10.328210830688477 ], [ "▁background", -10.328283309936523 ], [ "▁therefore", -10.328433990478516 ], [ "▁server", -10.328487396240234 ], [ "▁dream", -10.328742027282715 ], [ "ping", -10.329025268554688 ], [ "▁block", -10.329855918884277 ], [ "▁2009", -10.330734252929688 ], [ "▁facilities", -10.330931663513184 ], [ "▁II", -10.331367492675781 ], [ "▁attend", -10.33156967163086 ], [ "▁cap", -10.33224105834961 ], [ "35", -10.332416534423828 ], [ "▁steel", -10.332796096801758 ], [ "▁shared", -10.333391189575195 ], [ "▁doctor", -10.333939552307129 ], [ "▁River", -10.33411693572998 ], [ "▁Bay", -10.334456443786621 ], [ "▁length", -10.335005760192871 ], [ "▁jobs", -10.335466384887695 ], [ "▁Plus", -10.335992813110352 ], [ "▁station", -10.336140632629395 ], [ "▁elements", -10.336268424987793 ], [ "▁rock", -10.336668014526367 ], [ "▁professionals", -10.336670875549316 ], [ "cle", -10.336777687072754 ], [ "▁dont", -10.336873054504395 ], [ "urilor", -10.337142944335938 ], [ "▁gain", -10.337271690368652 ], [ "▁programme", -10.337540626525879 ], [ "▁Cor", -10.338377952575684 ], [ "▁leader", -10.338542938232422 ], [ "ării", -10.33876895904541 ], [ "▁>", -10.339137077331543 ], [ "▁task", -10.339471817016602 ], [ "▁seeing", -10.339943885803223 ], [ "▁statement", -10.34045696258545 ], [ "vin", -10.341094017028809 ], [ "▁fish", -10.341700553894043 ], [ "▁advanced", -10.342403411865234 ], [ "▁discuss", -10.342494010925293 ], [ "die", -10.342904090881348 ], [ "isch", -10.342944145202637 ], [ "▁plenty", -10.342947959899902 ], [ "▁Hall", -10.343120574951172 ], [ "▁Other", -10.343339920043945 ], [ "▁homes", -10.344944953918457 ], [ "▁Ni", -10.345016479492188 ], [ "▁testing", -10.345102310180664 ], [ "▁Last", -10.345392227172852 ], [ "▁Note", -10.345595359802246 ], [ "▁talking", -10.345934867858887 ], [ "▁exchange", -10.347042083740234 ], [ "▁exercise", -10.347189903259277 ], [ "▁cea", -10.347546577453613 ], [ "▁wife", -10.34820556640625 ], [ "▁Für", -10.348480224609375 ], [ "▁Texas", -10.34981918334961 ], [ "▁fr", -10.35065746307373 ], [ "▁speak", -10.350894927978516 ], [ "17", -10.351007461547852 ], [ "70", -10.351462364196777 ], [ "▁promote", -10.351851463317871 ], [ "tul", -10.351990699768066 ], [ "apos", -10.35208511352539 ], [ "▁Jahr", -10.35214900970459 ], [ "▁Trump", -10.352204322814941 ], [ "▁ohne", -10.352357864379883 ], [ "▁learned", -10.353700637817383 ], [ "▁Sp", -10.353803634643555 ], [ "▁owner", -10.354275703430176 ], [ "mor", -10.354422569274902 ], [ "▁fois", -10.354452133178711 ], [ "▁meaning", -10.35518741607666 ], [ "▁dacă", -10.355249404907227 ], [ "nic", -10.355484008789062 ], [ "а", -10.355525970458984 ], [ "14", -10.355767250061035 ], [ "▁driver", -10.356258392333984 ], [ "▁Amazon", -10.3567533493042 ], [ "▁flow", -10.358469009399414 ], [ "▁shot", -10.358726501464844 ], [ "▁sous", -10.35914421081543 ], [ "▁Gold", -10.359339714050293 ], [ "▁straight", -10.359562873840332 ], [ "▁conference", -10.359610557556152 ], [ "▁peste", -10.359662055969238 ], [ "whose", -10.36030101776123 ], [ "▁installation", -10.36050796508789 ], [ "▁produced", -10.360607147216797 ], [ "▁independent", -10.36192512512207 ], [ "▁Institute", -10.362021446228027 ], [ "▁James", -10.362373352050781 ], [ 
"▁mental", -10.362601280212402 ], [ "ara", -10.362798690795898 ], [ "ium", -10.363021850585938 ], [ "▁husband", -10.36306095123291 ], [ "▁guests", -10.363907814025879 ], [ "27", -10.364319801330566 ], [ "▁Che", -10.364651679992676 ], [ "▁Indian", -10.364694595336914 ], [ "zer", -10.36478042602539 ], [ "▁minimum", -10.364962577819824 ], [ "500", -10.365096092224121 ], [ "▁sit", -10.36561393737793 ], [ "put", -10.36656379699707 ], [ "▁avea", -10.36665153503418 ], [ "▁ride", -10.367088317871094 ], [ "gan", -10.367152214050293 ], [ "▁Ke", -10.36747932434082 ], [ "book", -10.367515563964844 ], [ "ages", -10.368019104003906 ], [ "▁presented", -10.368157386779785 ], [ "▁Com", -10.368927955627441 ], [ "▁Call", -10.369053840637207 ], [ "▁fee", -10.369847297668457 ], [ "ări", -10.369905471801758 ], [ "▁putea", -10.37072467803955 ], [ "▁Public", -10.371030807495117 ], [ "▁pa", -10.371152877807617 ], [ "28", -10.371233940124512 ], [ "▁Director", -10.37126350402832 ], [ "▁contains", -10.3717622756958 ], [ "▁factors", -10.372554779052734 ], [ "▁famous", -10.372614860534668 ], [ "▁bathroom", -10.373040199279785 ], [ "▁core", -10.37353229522705 ], [ "▁viele", -10.373610496520996 ], [ "▁acum", -10.374361991882324 ], [ "▁animal", -10.374407768249512 ], [ "▁Ihnen", -10.374425888061523 ], [ "▁Find", -10.374545097351074 ], [ "▁Fall", -10.374861717224121 ], [ "ford", -10.376051902770996 ], [ "▁coverage", -10.3765287399292 ], [ "▁smart", -10.376830101013184 ], [ "ries", -10.376893997192383 ], [ "▁memory", -10.3772554397583 ], [ "▁dance", -10.377443313598633 ], [ "11", -10.37746810913086 ], [ "▁communities", -10.377655982971191 ], [ "eurs", -10.378050804138184 ], [ "▁Florida", -10.378463745117188 ], [ "▁sport", -10.379366874694824 ], [ "▁bus", -10.37992000579834 ], [ "▁colors", -10.379969596862793 ], [ "▁affect", -10.380044937133789 ], [ "▁score", -10.380183219909668 ], [ "▁properties", -10.38050365447998 ], [ "18", -10.380593299865723 ], [ "▁astfel", -10.381312370300293 ], [ "▁beach", -10.382407188415527 ], [ "▁friendly", -10.382795333862305 ], [ "izing", -10.38288688659668 ], [ "▁buying", -10.383146286010742 ], [ "▁forget", -10.383195877075195 ], [ "este", -10.383198738098145 ], [ "▁capacity", -10.38360595703125 ], [ "▁lose", -10.383692741394043 ], [ "▁listed", -10.38407039642334 ], [ "ica", -10.384084701538086 ], [ "han", -10.384085655212402 ], [ "▁selbst", -10.384390830993652 ], [ "▁values", -10.384391784667969 ], [ "▁Power", -10.384559631347656 ], [ "▁comments", -10.384831428527832 ], [ "eux", -10.385346412658691 ], [ "ați", -10.385419845581055 ], [ "▁context", -10.385710716247559 ], [ "liche", -10.385944366455078 ], [ "▁keeping", -10.38620662689209 ], [ "▁2008", -10.38647174835205 ], [ "▁su", -10.386670112609863 ], [ "▁biggest", -10.386838912963867 ], [ "▁fiecare", -10.387356758117676 ], [ "ight", -10.38845157623291 ], [ "▁toute", -10.389808654785156 ], [ "▁dinner", -10.389827728271484 ], [ "bau", -10.390706062316895 ], [ "▁Mai", -10.390762329101562 ], [ "▁status", -10.390776634216309 ], [ "rez", -10.391340255737305 ], [ "▁selected", -10.391549110412598 ], [ "▁cells", -10.392601013183594 ], [ "▁eight", -10.393319129943848 ], [ "▁package", -10.393320083618164 ], [ "▁scale", -10.39333724975586 ], [ "din", -10.39336109161377 ], [ "▁Who", -10.393381118774414 ], [ "▁century", -10.393399238586426 ], [ "▁bi", -10.393516540527344 ], [ "▁Africa", -10.39384937286377 ], [ "▁http", -10.394133567810059 ], [ "▁named", -10.394230842590332 ], [ "▁adding", -10.394901275634766 ], [ "▁mention", -10.395039558410645 ], [ 
"▁casino", -10.395421981811523 ], [ "▁couldn", -10.395624160766602 ], [ "▁outdoor", -10.395912170410156 ], [ "▁sugar", -10.3960542678833 ], [ "▁prepared", -10.396124839782715 ], [ "21", -10.396528244018555 ], [ "▁Ba", -10.396632194519043 ], [ "vers", -10.396697998046875 ], [ "ration", -10.396773338317871 ], [ "▁ja", -10.397035598754883 ], [ "▁aspect", -10.397224426269531 ], [ "▁31", -10.397462844848633 ], [ "▁treat", -10.397475242614746 ], [ "tru", -10.397841453552246 ], [ "▁flat", -10.397890090942383 ], [ "32", -10.397989273071289 ], [ "▁reality", -10.398238182067871 ], [ "▁waste", -10.39876937866211 ], [ "▁King", -10.399649620056152 ], [ "▁drug", -10.399870872497559 ], [ "▁operations", -10.400120735168457 ], [ "▁aim", -10.40042495727539 ], [ "▁fans", -10.400444984436035 ], [ "▁vers", -10.400891304016113 ], [ "▁plants", -10.400971412658691 ], [ "▁Dis", -10.401477813720703 ], [ "▁Daten", -10.401510238647461 ], [ "être", -10.40267276763916 ], [ "▁placed", -10.40326976776123 ], [ "▁bon", -10.403977394104004 ], [ "beim", -10.4041109085083 ], [ "▁slow", -10.40501880645752 ], [ "cri", -10.405512809753418 ], [ "▁Care", -10.405691146850586 ], [ "mes", -10.406211853027344 ], [ "26", -10.406257629394531 ], [ "box", -10.406330108642578 ], [ "▁helpful", -10.406362533569336 ], [ "▁documents", -10.406543731689453 ], [ "▁visitors", -10.406773567199707 ], [ "ture", -10.406862258911133 ], [ "▁Menschen", -10.406891822814941 ], [ "▁Chi", -10.406975746154785 ], [ "▁recipe", -10.40764045715332 ], [ "▁kept", -10.407693862915039 ], [ "▁Grand", -10.407915115356445 ], [ "▁operating", -10.408178329467773 ], [ "point", -10.408329010009766 ], [ "▁bin", -10.40837287902832 ], [ "▁Tri", -10.40845775604248 ], [ "Be", -10.408512115478516 ], [ "▁experiences", -10.40856647491455 ], [ "▁academic", -10.408608436584473 ], [ "▁finden", -10.40870475769043 ], [ "▁sera", -10.409092903137207 ], [ "act", -10.410541534423828 ], [ "▁Pa", -10.410907745361328 ], [ "▁society", -10.411056518554688 ], [ "▁combination", -10.411237716674805 ], [ "5%", -10.41182804107666 ], [ "▁owners", -10.41188907623291 ], [ "▁poor", -10.412039756774902 ], [ "▁Robert", -10.412378311157227 ], [ "▁military", -10.412964820861816 ], [ "▁economy", -10.413033485412598 ], [ "▁aware", -10.413055419921875 ], [ "rot", -10.413443565368652 ], [ "mie", -10.413544654846191 ], [ "▁Thursday", -10.414399147033691 ], [ "▁2011", -10.41490650177002 ], [ "▁fantastic", -10.41554069519043 ], [ "▁numerous", -10.415921211242676 ], [ "▁fair", -10.4165620803833 ], [ "med", -10.416753768920898 ], [ "▁welche", -10.416893005371094 ], [ "▁fruit", -10.41712760925293 ], [ "ku", -10.417325019836426 ], [ "▁Social", -10.417583465576172 ], [ "▁funds", -10.418157577514648 ], [ "▁atunci", -10.418214797973633 ], [ "▁Part", -10.418238639831543 ], [ "▁Big", -10.418301582336426 ], [ "▁2010", -10.419414520263672 ], [ "▁detail", -10.419889450073242 ], [ "▁Peter", -10.419942855834961 ], [ "ani", -10.420196533203125 ], [ "▁Wie", -10.420795440673828 ], [ "▁Tu", -10.421649932861328 ], [ "ear", -10.421706199645996 ], [ "▁Wenn", -10.421941757202148 ], [ "▁manager", -10.42199993133545 ], [ "▁Dan", -10.422409057617188 ], [ "▁Pi", -10.42257308959961 ], [ "▁wants", -10.422652244567871 ], [ "▁Data", -10.42322826385498 ], [ "pos", -10.42387580871582 ], [ "▁older", -10.423946380615234 ], [ "▁Download", -10.424071311950684 ], [ "▁Was", -10.424107551574707 ], [ "▁corner", -10.424195289611816 ], [ "▁president", -10.424199104309082 ], [ "mas", -10.424248695373535 ], [ "▁smaller", -10.424361228942871 ], [ "▁bright", 
-10.424459457397461 ], [ "▁proper", -10.424582481384277 ], [ "▁Kinder", -10.424637794494629 ], [ "▁Two", -10.424668312072754 ], [ "▁award", -10.42471694946289 ], [ "▁premier", -10.425211906433105 ], [ "▁seek", -10.425646781921387 ], [ "▁thank", -10.425662994384766 ], [ "▁proud", -10.426509857177734 ], [ "▁workers", -10.426774024963379 ], [ "▁2000", -10.426970481872559 ], [ "▁gone", -10.427482604980469 ], [ "▁medium", -10.427693367004395 ], [ "▁grade", -10.42777156829834 ], [ "▁Ru", -10.427800178527832 ], [ "cro", -10.427851676940918 ], [ "▁interview", -10.428311347961426 ], [ "23", -10.428787231445312 ], [ "▁mari", -10.429442405700684 ], [ "▁80", -10.429756164550781 ], [ "▁Ga", -10.430047035217285 ], [ "▁90", -10.431839942932129 ], [ "▁anderen", -10.432605743408203 ], [ "▁cultural", -10.433018684387207 ], [ "but", -10.433144569396973 ], [ "rum", -10.433300018310547 ], [ "get", -10.43338680267334 ], [ "▁pop", -10.433582305908203 ], [ "▁Information", -10.433594703674316 ], [ "▁press", -10.434972763061523 ], [ "▁Project", -10.435359001159668 ], [ "▁excited", -10.435755729675293 ], [ "▁Saint", -10.436088562011719 ], [ "▁England", -10.436192512512207 ], [ "▁beauty", -10.43643856048584 ], [ "▁agreement", -10.436464309692383 ], [ "▁Like", -10.437565803527832 ], [ "▁strength", -10.437664985656738 ], [ "▁waiting", -10.438165664672852 ], [ "и", -10.438270568847656 ], [ "Le", -10.438329696655273 ], [ "▁residents", -10.43835735321045 ], [ "▁Ben", -10.438603401184082 ], [ "▁mentioned", -10.439260482788086 ], [ "▁etwas", -10.43930721282959 ], [ "▁rooms", -10.439347267150879 ], [ "▁neue", -10.439501762390137 ], [ "▁Microsoft", -10.439726829528809 ], [ "▁passed", -10.440205574035645 ], [ "▁sea", -10.440893173217773 ], [ "▁electric", -10.441244125366211 ], [ "▁forms", -10.441384315490723 ], [ "▁Central", -10.441597938537598 ], [ "▁Lord", -10.442625999450684 ], [ "ute", -10.442763328552246 ], [ "▁pré", -10.442790031433105 ], [ "▁square", -10.44308090209961 ], [ "itatea", -10.443451881408691 ], [ "▁debt", -10.443757057189941 ], [ "▁street", -10.443975448608398 ], [ "▁pi", -10.444917678833008 ], [ "▁happened", -10.445326805114746 ], [ "▁Tuesday", -10.445592880249023 ], [ "recht", -10.446094512939453 ], [ "▁Eine", -10.44627857208252 ], [ "▁Set", -10.446768760681152 ], [ "▁federal", -10.4468412399292 ], [ "CC", -10.446905136108398 ], [ "....", -10.446938514709473 ], [ "lig", -10.447463035583496 ], [ "▁Christian", -10.44870662689209 ], [ "▁truth", -10.449213981628418 ], [ "▁map", -10.449728012084961 ], [ "▁secret", -10.449979782104492 ], [ "▁Chinese", -10.450844764709473 ], [ "hol", -10.450895309448242 ], [ "▁wrote", -10.451505661010742 ], [ "▁hospital", -10.451783180236816 ], [ "▁Island", -10.451870918273926 ], [ "▁frame", -10.451946258544922 ], [ "▁sources", -10.452117919921875 ], [ "pan", -10.453242301940918 ], [ "▁29", -10.453530311584473 ], [ "▁changing", -10.454547882080078 ], [ "▁Where", -10.454627990722656 ], [ "▁negative", -10.45471477508545 ], [ "▁processes", -10.45491886138916 ], [ "▁leadership", -10.455029487609863 ], [ "▁nos", -10.455195426940918 ], [ "▁info", -10.455780029296875 ], [ "▁Gu", -10.45595645904541 ], [ "▁CO", -10.45605182647705 ], [ "▁reference", -10.456884384155273 ], [ "▁corporate", -10.457097053527832 ], [ "▁characters", -10.457563400268555 ], [ "▁dining", -10.4577054977417 ], [ "▁becoming", -10.459708213806152 ], [ "▁4.", -10.460311889648438 ], [ "▁Science", -10.460626602172852 ], [ "▁Education", -10.461943626403809 ], [ "▁camp", -10.46207046508789 ], [ "fall", -10.462146759033203 
], [ "▁Auch", -10.462471961975098 ], [ "▁topic", -10.462519645690918 ], [ "▁influence", -10.463460922241211 ], [ "▁70", -10.463892936706543 ], [ "▁identify", -10.464459419250488 ], [ "▁(19", -10.464646339416504 ], [ "care", -10.465216636657715 ], [ "ions", -10.466215133666992 ], [ "ray", -10.4663724899292 ], [ "▁Both", -10.466577529907227 ], [ "▁collect", -10.466997146606445 ], [ "▁practices", -10.467667579650879 ], [ "▁fight", -10.468058586120605 ], [ "▁injury", -10.46873664855957 ], [ "▁nici", -10.46905517578125 ], [ "▁depuis", -10.469563484191895 ], [ "▁actions", -10.469609260559082 ], [ "▁Wednesday", -10.47089958190918 ], [ "▁bill", -10.471086502075195 ], [ "▁cheap", -10.471318244934082 ], [ "lui", -10.471719741821289 ], [ "▁awesome", -10.471731185913086 ], [ "tig", -10.472554206848145 ], [ "▁expensive", -10.472636222839355 ], [ "ceea", -10.472834587097168 ], [ "▁exact", -10.472907066345215 ], [ "22", -10.473462104797363 ], [ "▁avant", -10.47352123260498 ], [ "▁fat", -10.47353744506836 ], [ "▁spending", -10.474353790283203 ], [ "▁designs", -10.47608470916748 ], [ "▁damit", -10.4761323928833 ], [ "▁comp", -10.47619342803955 ], [ "▁whatever", -10.476434707641602 ], [ "▁Light", -10.476442337036133 ], [ "▁quarter", -10.47680377960205 ], [ "hand", -10.477301597595215 ], [ "▁connected", -10.477584838867188 ], [ "▁technologies", -10.47772216796875 ], [ "ges", -10.477808952331543 ], [ "▁shower", -10.478998184204102 ], [ "▁500", -10.47923469543457 ], [ "▁Time", -10.479436874389648 ], [ "▁zone", -10.480525970458984 ], [ "▁vote", -10.480624198913574 ], [ "▁andere", -10.480871200561523 ], [ "▁otherwise", -10.480988502502441 ], [ "tur", -10.481294631958008 ], [ "▁happens", -10.481504440307617 ], [ "hin", -10.481597900390625 ], [ "▁volume", -10.482161521911621 ], [ "▁thousands", -10.482391357421875 ], [ "war", -10.482551574707031 ], [ "▁Play", -10.482900619506836 ], [ "▁temperature", -10.48371410369873 ], [ "▁industrial", -10.483830451965332 ], [ "▁fuel", -10.483915328979492 ], [ "100", -10.48409366607666 ], [ "top", -10.484210014343262 ], [ "kin", -10.484312057495117 ], [ "▁efficient", -10.484414100646973 ], [ "teil", -10.484525680541992 ], [ "alt", -10.484578132629395 ], [ "▁monde", -10.48483657836914 ], [ "▁Ra", -10.484899520874023 ], [ "▁bedroom", -10.485103607177734 ], [ "▁showing", -10.485316276550293 ], [ "▁continued", -10.485490798950195 ], [ "▁Plan", -10.48552131652832 ], [ "▁assistance", -10.486014366149902 ], [ "▁discover", -10.48622989654541 ], [ "▁Year", -10.486238479614258 ], [ "▁applied", -10.486433029174805 ], [ "▁audio", -10.48755931854248 ], [ "▁thus", -10.487645149230957 ], [ "▁permet", -10.48806095123291 ], [ "▁fashion", -10.488532066345215 ], [ "cra", -10.488645553588867 ], [ "ious", -10.488700866699219 ], [ "▁focused", -10.489258766174316 ], [ "16", -10.48930549621582 ], [ "▁arm", -10.489364624023438 ], [ "▁Their", -10.489789962768555 ], [ "▁Foundation", -10.49022388458252 ], [ "▁majority", -10.49022388458252 ], [ "▁wind", -10.490785598754883 ], [ "▁bought", -10.491056442260742 ], [ "▁factor", -10.491918563842773 ], [ "▁opened", -10.49213695526123 ], [ "tern", -10.492374420166016 ], [ "▁cars", -10.492597579956055 ], [ "▁exciting", -10.492691040039062 ], [ "▁affordable", -10.493510246276855 ], [ "ches", -10.493563652038574 ], [ "▁panel", -10.493720054626465 ], [ "▁caused", -10.493793487548828 ], [ "▁travail", -10.493998527526855 ], [ "▁roof", -10.494073867797852 ], [ "▁enable", -10.494202613830566 ], [ "▁toward", -10.494491577148438 ], [ "▁Development", -10.494688987731934 ], [ 
"▁foreign", -10.495308876037598 ], [ "avi", -10.495320320129395 ], [ "long", -10.495328903198242 ], [ "De", -10.49578857421875 ], [ "▁Mon", -10.49588394165039 ], [ "▁Va", -10.495942115783691 ], [ "AP", -10.496097564697266 ], [ "▁asta", -10.49720573425293 ], [ "▁prepare", -10.497220993041992 ], [ "▁German", -10.497261047363281 ], [ "▁Centre", -10.497325897216797 ], [ "ère", -10.497367858886719 ], [ "▁fear", -10.497537612915039 ], [ "▁Este", -10.497878074645996 ], [ "▁Des", -10.49793529510498 ], [ "▁Kon", -10.499308586120605 ], [ "á", -10.499866485595703 ], [ "stand", -10.500805854797363 ], [ "▁Real", -10.500842094421387 ], [ "lichen", -10.50098705291748 ], [ "▁Beach", -10.501455307006836 ], [ "▁expertise", -10.50185775756836 ], [ "▁route", -10.502445220947266 ], [ "▁nation", -10.502551078796387 ], [ "▁snow", -10.503022193908691 ], [ "▁articles", -10.503127098083496 ], [ "▁Wood", -10.504426956176758 ], [ "▁operation", -10.50494384765625 ], [ "▁passion", -10.505215644836426 ], [ "▁cand", -10.505690574645996 ], [ "haus", -10.505701065063477 ], [ "OR", -10.505711555480957 ], [ "▁senior", -10.506511688232422 ], [ "▁becomes", -10.506546020507812 ], [ "▁sounds", -10.506878852844238 ], [ "▁enjoyed", -10.50704574584961 ], [ "▁gegen", -10.507533073425293 ], [ "▁courses", -10.507919311523438 ], [ "▁absolutely", -10.508257865905762 ], [ "tim", -10.508264541625977 ], [ "uff", -10.508516311645508 ], [ "▁moins", -10.50860595703125 ], [ "▁TO", -10.509060859680176 ], [ "▁fabric", -10.509267807006836 ], [ "poli", -10.509326934814453 ], [ "▁Bre", -10.509761810302734 ], [ "▁bo", -10.509916305541992 ], [ "▁Elle", -10.510469436645508 ], [ "bu", -10.512336730957031 ], [ "▁participants", -10.512401580810547 ], [ "stone", -10.512794494628906 ], [ "ties", -10.51366138458252 ], [ "▁listen", -10.513700485229492 ], [ "▁Spiel", -10.513752937316895 ], [ "pot", -10.513872146606445 ], [ "▁selling", -10.514358520507812 ], [ "▁geht", -10.514680862426758 ], [ "▁mini", -10.515146255493164 ], [ "▁trans", -10.515408515930176 ], [ "▁ingredients", -10.515642166137695 ], [ "auf", -10.515671730041504 ], [ "▁orice", -10.51595401763916 ], [ "▁Next", -10.516300201416016 ], [ "▁cream", -10.516756057739258 ], [ "▁edge", -10.516973495483398 ], [ "▁recommended", -10.517022132873535 ], [ "▁Form", -10.517277717590332 ], [ "▁processing", -10.51746940612793 ], [ "vert", -10.517709732055664 ], [ "▁described", -10.518362998962402 ], [ "▁installed", -10.51884937286377 ], [ "▁managed", -10.518952369689941 ], [ "▁electronic", -10.518966674804688 ], [ "▁performed", -10.519064903259277 ], [ "▁raise", -10.519098281860352 ], [ "▁imagine", -10.519281387329102 ], [ "down", -10.51952838897705 ], [ "▁fond", -10.519978523254395 ], [ "▁Inter", -10.520434379577637 ], [ "▁Mc", -10.520550727844238 ], [ "▁Dans", -10.520679473876953 ], [ "istic", -10.520966529846191 ], [ "▁miss", -10.521052360534668 ], [ "sur", -10.521062850952148 ], [ "▁Col", -10.521879196166992 ], [ "cut", -10.522021293640137 ], [ "▁dupa", -10.522160530090332 ], [ "▁Twitter", -10.522604942321777 ], [ "▁bowl", -10.523721694946289 ], [ "▁remains", -10.5237455368042 ], [ "▁Jan", -10.524046897888184 ], [ "▁smooth", -10.524162292480469 ], [ "▁fees", -10.524415969848633 ], [ "▁aid", -10.524494171142578 ], [ "▁presence", -10.524827003479004 ], [ "▁Android", -10.52499771118164 ], [ "▁decisions", -10.52539348602295 ], [ "▁names", -10.5254487991333 ], [ "▁Music", -10.525546073913574 ], [ "▁innovative", -10.525578498840332 ], [ "▁Tom", -10.525997161865234 ], [ "▁spread", -10.526165962219238 ], [ 
"▁lovely", -10.526222229003906 ], [ "▁daughter", -10.526397705078125 ], [ "US", -10.527050971984863 ], [ "▁facility", -10.52710247039795 ], [ "▁peace", -10.527105331420898 ], [ "▁department", -10.527277946472168 ], [ "▁weiter", -10.527591705322266 ], [ "▁Sun", -10.527756690979004 ], [ "▁fund", -10.527772903442383 ], [ "▁2018.", -10.52792739868164 ], [ "▁discussion", -10.528186798095703 ], [ "75", -10.528799057006836 ], [ "EC", -10.529126167297363 ], [ "▁lunch", -10.529144287109375 ], [ "▁videos", -10.52927017211914 ], [ "05", -10.531253814697266 ], [ "ige", -10.531266212463379 ], [ "▁parking", -10.531564712524414 ], [ "▁relationships", -10.531732559204102 ], [ "▁George", -10.532986640930176 ], [ "▁teachers", -10.53299617767334 ], [ "room", -10.533458709716797 ], [ "▁Tra", -10.533605575561523 ], [ "▁Sam", -10.533651351928711 ], [ "▁properly", -10.535590171813965 ], [ "▁Book", -10.535629272460938 ], [ "▁CA", -10.536957740783691 ], [ "▁calls", -10.53756046295166 ], [ "▁stat", -10.538175582885742 ], [ "ux", -10.538220405578613 ], [ "▁soit", -10.538439750671387 ], [ "▁Community", -10.538684844970703 ], [ "▁Jahren", -10.538714408874512 ], [ "▁increasing", -10.539575576782227 ], [ "▁civil", -10.540184020996094 ], [ "app", -10.540573120117188 ], [ "▁35", -10.540589332580566 ], [ "▁rise", -10.540600776672363 ], [ "▁dabei", -10.540989875793457 ], [ "▁studio", -10.541803359985352 ], [ "▁policies", -10.542054176330566 ], [ "▁agent", -10.542055130004883 ], [ "▁Before", -10.542601585388184 ], [ "▁Cal", -10.543017387390137 ], [ "▁2005", -10.543404579162598 ], [ "▁sample", -10.543777465820312 ], [ "▁manner", -10.545186996459961 ], [ "wing", -10.54521369934082 ], [ "stra", -10.545552253723145 ], [ "▁fel", -10.545793533325195 ], [ "▁Show", -10.545952796936035 ], [ "▁scene", -10.54656982421875 ], [ "mic", -10.546764373779297 ], [ "nom", -10.546995162963867 ], [ "▁typically", -10.547088623046875 ], [ "▁pair", -10.547104835510254 ], [ "▁detailed", -10.547394752502441 ], [ "▁Work", -10.547422409057617 ], [ "▁cities", -10.547451972961426 ], [ "▁Rock", -10.54749584197998 ], [ "▁Gar", -10.547906875610352 ], [ "▁serving", -10.548352241516113 ], [ "▁machen", -10.548521995544434 ], [ "▁trees", -10.54888916015625 ], [ "▁accident", -10.549199104309082 ], [ "▁cloud", -10.54920482635498 ], [ "▁animals", -10.549297332763672 ], [ "▁Den", -10.549897193908691 ], [ "▁Wa", -10.54990291595459 ], [ "▁suggest", -10.550220489501953 ], [ "putting", -10.550407409667969 ], [ "▁suite", -10.550434112548828 ], [ "▁clearly", -10.550849914550781 ], [ "▁net", -10.551287651062012 ], [ "▁funding", -10.551506996154785 ], [ "▁salt", -10.551935195922852 ], [ "▁Men", -10.552119255065918 ], [ "ped", -10.552419662475586 ], [ "▁Food", -10.553142547607422 ], [ "▁leaving", -10.553544998168945 ], [ "▁Government", -10.554243087768555 ], [ "ick", -10.554381370544434 ], [ "▁seat", -10.555121421813965 ], [ "▁Los", -10.555183410644531 ], [ "▁teacher", -10.555587768554688 ], [ "▁iPhone", -10.555693626403809 ], [ "▁300", -10.556120872497559 ], [ "▁commitment", -10.556180000305176 ], [ "▁aspects", -10.556498527526855 ], [ "▁previously", -10.55711555480957 ], [ "▁cent", -10.5572509765625 ], [ "▁Vo", -10.557341575622559 ], [ "▁artists", -10.557963371276855 ], [ "▁runs", -10.558130264282227 ], [ ">", -10.558155059814453 ], [ "▁Gi", -10.558273315429688 ], [ "▁mar", -10.5585355758667 ], [ "!!!", -10.558544158935547 ], [ "▁Media", -10.558943748474121 ], [ "▁feedback", -10.559109687805176 ], [ "▁resolution", -10.559117317199707 ], [ "IN", -10.55915641784668 ], [ 
"▁wurden", -10.55952262878418 ], [ "▁busy", -10.559832572937012 ], [ "▁adult", -10.5600004196167 ], [ "29", -10.560487747192383 ], [ "elles", -10.561375617980957 ], [ "▁closed", -10.561762809753418 ], [ "▁trouble", -10.561767578125 ], [ "▁rent", -10.561984062194824 ], [ "lot", -10.56224536895752 ], [ "▁importance", -10.562314987182617 ], [ "▁units", -10.56257438659668 ], [ "Pro", -10.562713623046875 ], [ "▁provider", -10.563005447387695 ], [ "▁visual", -10.563288688659668 ], [ "IT", -10.563385009765625 ], [ "▁diet", -10.563733100891113 ], [ "▁appearance", -10.563932418823242 ], [ "pin", -10.564576148986816 ], [ "▁Din", -10.564760208129883 ], [ "▁eating", -10.565516471862793 ], [ "Fi", -10.565762519836426 ], [ "ball", -10.565765380859375 ], [ "är", -10.565861701965332 ], [ "ney", -10.565878868103027 ], [ "▁records", -10.566070556640625 ], [ "▁Fi", -10.566180229187012 ], [ "▁faut", -10.566329002380371 ], [ "▁CD", -10.566803932189941 ], [ "ign", -10.566930770874023 ], [ "▁vă", -10.566996574401855 ], [ "▁agency", -10.567153930664062 ], [ "ierung", -10.567323684692383 ], [ "▁Back", -10.567361831665039 ], [ "▁windows", -10.567545890808105 ], [ "▁pull", -10.567888259887695 ], [ "ash", -10.567959785461426 ], [ "▁profit", -10.568593978881836 ], [ "▁brings", -10.568605422973633 ], [ "▁Committee", -10.569122314453125 ], [ "▁girl", -10.569174766540527 ], [ "▁vehicles", -10.569372177124023 ], [ "▁Hier", -10.569567680358887 ], [ "ES", -10.569639205932617 ], [ "până", -10.569880485534668 ], [ "▁Kunden", -10.570380210876465 ], [ "pen", -10.570462226867676 ], [ "▁explain", -10.570505142211914 ], [ "▁cadru", -10.570760726928711 ], [ "▁attack", -10.571100234985352 ], [ "▁markets", -10.571115493774414 ], [ "▁claims", -10.571340560913086 ], [ "▁walking", -10.571385383605957 ], [ "▁pouv", -10.571528434753418 ], [ "low", -10.571642875671387 ], [ "▁showed", -10.572114944458008 ], [ "▁principal", -10.57211971282959 ], [ "▁lucru", -10.572144508361816 ], [ "▁precum", -10.572712898254395 ], [ "TA", -10.573094367980957 ], [ "▁partners", -10.573104858398438 ], [ "▁exist", -10.573136329650879 ], [ "▁internal", -10.57334041595459 ], [ "hen", -10.573945045471191 ], [ "▁Master", -10.573966979980469 ], [ "unless", -10.574013710021973 ], [ "▁doubt", -10.574721336364746 ], [ "$", -10.574785232543945 ], [ "▁Long", -10.574888229370117 ], [ "▁leaves", -10.574907302856445 ], [ "allowing", -10.575063705444336 ], [ "pol", -10.575272560119629 ], [ "▁Up", -10.575491905212402 ], [ "▁Contact", -10.576093673706055 ], [ "▁practical", -10.57708740234375 ], [ "▁suit", -10.57758903503418 ], [ "▁Site", -10.577656745910645 ], [ "▁formation", -10.57768726348877 ], [ "▁signal", -10.578215599060059 ], [ "▁approximately", -10.578414916992188 ], [ "▁ourselves", -10.578497886657715 ], [ "▁colour", -10.578519821166992 ], [ "▁species", -10.578530311584473 ], [ "▁advance", -10.578753471374512 ], [ "▁PM", -10.57891845703125 ], [ "ans", -10.579121589660645 ], [ "▁locations", -10.579397201538086 ], [ "vous", -10.579601287841797 ], [ "▁updated", -10.579636573791504 ], [ "▁faith", -10.579673767089844 ], [ "mus", -10.579740524291992 ], [ "▁stores", -10.579863548278809 ], [ "heim", -10.580127716064453 ], [ "▁suitable", -10.580558776855469 ], [ "▁continues", -10.580703735351562 ], [ "▁fac", -10.581133842468262 ], [ "ever", -10.581156730651855 ], [ "▁Bill", -10.581195831298828 ], [ "▁chose", -10.58121109008789 ], [ "▁inform", -10.581228256225586 ], [ "▁environmental", -10.581427574157715 ], [ "▁responsibility", -10.58188533782959 ], [ "99", -10.582542419433594 
], [ "▁competitive", -10.583723068237305 ], [ "▁strategies", -10.583903312683105 ], [ "▁toujours", -10.584270477294922 ], [ "tive", -10.58430290222168 ], [ "▁automatically", -10.585600852966309 ], [ "▁dress", -10.585609436035156 ], [ "▁Minister", -10.585624694824219 ], [ "har", -10.586076736450195 ], [ "▁Start", -10.586249351501465 ], [ "▁=", -10.586563110351562 ], [ "▁pattern", -10.58659553527832 ], [ "tier", -10.58676528930664 ], [ "▁pays", -10.587034225463867 ], [ "▁profile", -10.58725357055664 ], [ "▁raised", -10.587263107299805 ], [ "ange", -10.587288856506348 ], [ "▁drink", -10.587762832641602 ], [ "▁element", -10.588042259216309 ], [ "▁landscape", -10.58875560760498 ], [ "▁Tag", -10.589073181152344 ], [ "▁cheese", -10.589590072631836 ], [ "ific", -10.590009689331055 ], [ "▁Stadt", -10.590181350708008 ], [ "39", -10.591398239135742 ], [ "▁launch", -10.592113494873047 ], [ "▁wouldn", -10.592150688171387 ], [ "AS", -10.592202186584473 ], [ "▁push", -10.593059539794922 ], [ "▁mill", -10.593452453613281 ], [ "▁mass", -10.593647003173828 ], [ "▁category", -10.593790054321289 ], [ "sondern", -10.594050407409668 ], [ "col", -10.594111442565918 ], [ "▁climate", -10.594313621520996 ], [ "lier", -10.594437599182129 ], [ "▁slightly", -10.595514297485352 ], [ "95", -10.596519470214844 ], [ "ace", -10.596612930297852 ], [ "▁domain", -10.597633361816406 ], [ "kan", -10.598306655883789 ], [ "▁feed", -10.598485946655273 ], [ "▁Live", -10.598837852478027 ], [ "▁Mais", -10.599113464355469 ], [ "▁après", -10.599365234375 ], [ "▁village", -10.59941577911377 ], [ "▁hatte", -10.59968090057373 ], [ "▁joined", -10.599881172180176 ], [ "▁Museum", -10.600311279296875 ], [ "head", -10.600855827331543 ], [ "▁draw", -10.6009521484375 ], [ "▁concerns", -10.600966453552246 ], [ "ER", -10.601505279541016 ], [ "▁technique", -10.601648330688477 ], [ "▁Bio", -10.601861000061035 ], [ "▁Sea", -10.601881980895996 ], [ "▁@", -10.601927757263184 ], [ "wer", -10.6021146774292 ], [ "▁battery", -10.602462768554688 ], [ "▁mostly", -10.60267448425293 ], [ "▁familiar", -10.602680206298828 ], [ "▁Sub", -10.602689743041992 ], [ "▁delicious", -10.603222846984863 ], [ "doch", -10.60326099395752 ], [ "60", -10.603395462036133 ], [ "▁carte", -10.603611946105957 ], [ "▁avut", -10.604146957397461 ], [ "▁premium", -10.60460376739502 ], [ "▁attempt", -10.604704856872559 ], [ "▁Über", -10.60473346710205 ], [ "▁combined", -10.604935646057129 ], [ "lement", -10.604947090148926 ], [ "▁voi", -10.605031967163086 ], [ "▁wonder", -10.605376243591309 ], [ "▁failure", -10.606106758117676 ], [ "which", -10.606147766113281 ], [ "esti", -10.606316566467285 ], [ "31", -10.606547355651855 ], [ "▁sta", -10.606734275817871 ], [ "▁transform", -10.60673999786377 ], [ "▁license", -10.606743812561035 ], [ "▁depending", -10.606758117675781 ], [ "▁specifically", -10.606782913208008 ], [ "▁OF", -10.60693645477295 ], [ "band", -10.606959342956543 ], [ "▁Sport", -10.60731315612793 ], [ "list", -10.607434272766113 ], [ "▁Tour", -10.60753059387207 ], [ "▁Israel", -10.607564926147461 ], [ "▁filled", -10.607722282409668 ], [ "▁manual", -10.60776138305664 ], [ "▁watching", -10.608621597290039 ], [ "▁rule", -10.608877182006836 ], [ "mat", -10.60901927947998 ], [ "▁notes", -10.609585762023926 ], [ "▁Oh", -10.60960578918457 ], [ "▁bereits", -10.609634399414062 ], [ "▁foundation", -10.609916687011719 ], [ "▁vital", -10.610146522521973 ], [ "▁lassen", -10.610747337341309 ], [ "▁cât", -10.611162185668945 ], [ "▁shipping", -10.611433029174805 ], [ "▁registered", 
-10.611513137817383 ], [ "▁jour", -10.612669944763184 ], [ "▁island", -10.61276626586914 ], [ "▁sets", -10.613068580627441 ], [ "▁football", -10.613683700561523 ], [ "▁EU", -10.613860130310059 ], [ "▁stone", -10.614019393920898 ], [ "▁Press", -10.614699363708496 ], [ "▁adapt", -10.615066528320312 ], [ "ised", -10.615425109863281 ], [ "▁thoughts", -10.615434646606445 ], [ "▁doors", -10.615851402282715 ], [ "€", -10.615954399108887 ], [ "▁components", -10.616040229797363 ], [ "rig", -10.616332054138184 ], [ "▁generation", -10.616585731506348 ], [ "▁guess", -10.616700172424316 ], [ "cker", -10.61694049835205 ], [ "▁realize", -10.617207527160645 ], [ "▁Roman", -10.617310523986816 ], [ "▁contre", -10.617693901062012 ], [ "▁Out", -10.617938995361328 ], [ "▁IN", -10.619051933288574 ], [ "cip", -10.619085311889648 ], [ "59", -10.619330406188965 ], [ "▁enhance", -10.619768142700195 ], [ "▁battle", -10.61982250213623 ], [ "▁monitor", -10.619863510131836 ], [ "▁Martin", -10.62045955657959 ], [ "▁websites", -10.620461463928223 ], [ "▁DE", -10.620599746704102 ], [ "▁Festival", -10.620951652526855 ], [ "ân", -10.62131118774414 ], [ "▁Place", -10.621419906616211 ], [ "▁rare", -10.621554374694824 ], [ "această", -10.621726989746094 ], [ "▁sollte", -10.621731758117676 ], [ "▁Read", -10.621816635131836 ], [ "ware", -10.622169494628906 ], [ "Those", -10.622671127319336 ], [ "ende", -10.623543739318848 ], [ "▁prix", -10.623835563659668 ], [ "▁roman", -10.624101638793945 ], [ "▁creation", -10.624224662780762 ], [ "▁confidence", -10.624552726745605 ], [ "▁Japan", -10.624638557434082 ], [ "▁rain", -10.624942779541016 ], [ "▁guys", -10.62518310546875 ], [ "▁south", -10.625236511230469 ], [ "▁trading", -10.625646591186523 ], [ "▁€", -10.626100540161133 ], [ "▁Film", -10.626341819763184 ], [ "▁pana", -10.627065658569336 ], [ "▁asemenea", -10.627066612243652 ], [ "36", -10.627190589904785 ], [ "▁instance", -10.627884864807129 ], [ "cou", -10.629385948181152 ], [ "▁nun", -10.630074501037598 ], [ "▁Pass", -10.630390167236328 ], [ "Cette", -10.630579948425293 ], [ "▁Network", -10.630876541137695 ], [ "▁prime", -10.631010055541992 ], [ "▁spiritual", -10.632098197937012 ], [ "▁tough", -10.633030891418457 ], [ "▁AND", -10.633086204528809 ], [ "▁Cat", -10.633601188659668 ], [ "▁boat", -10.633611679077148 ], [ "▁leads", -10.634864807128906 ], [ "▁Germany", -10.63509750366211 ], [ "▁valuable", -10.635635375976562 ], [ "57", -10.635892868041992 ], [ "lect", -10.636148452758789 ], [ "▁distribution", -10.636445045471191 ], [ "dar", -10.636518478393555 ], [ "▁Manager", -10.637701988220215 ], [ "cha", -10.637725830078125 ], [ "▁obtain", -10.637741088867188 ], [ "GB", -10.637908935546875 ], [ "▁unor", -10.638079643249512 ], [ "schaft", -10.638603210449219 ], [ "▁zwischen", -10.638723373413086 ], [ "▁winning", -10.639172554016113 ], [ "▁suis", -10.639811515808105 ], [ "58", -10.640130996704102 ], [ "▁Party", -10.640372276306152 ], [ "▁ceva", -10.640416145324707 ], [ "▁comprehensive", -10.640684127807617 ], [ "▁aceste", -10.640726089477539 ], [ "▁committed", -10.640726089477539 ], [ "▁Hu", -10.641382217407227 ], [ "ţ", -10.64149284362793 ], [ "▁north", -10.642021179199219 ], [ "werk", -10.642542839050293 ], [ "▁interface", -10.642794609069824 ], [ "▁Valley", -10.64281177520752 ], [ "▁anywhere", -10.64281177520752 ], [ "▁Only", -10.642851829528809 ], [ "TE", -10.643295288085938 ], [ "hui", -10.6436767578125 ], [ "bus", -10.643951416015625 ], [ "vis", -10.6439790725708 ], [ "▁Society", -10.645116806030273 ], [ "▁reliable", 
-10.64556884765625 ], [ "▁quelques", -10.64563274383545 ], [ "tech", -10.646187782287598 ], [ "ual", -10.646377563476562 ], [ "▁educational", -10.646418571472168 ], [ "serv", -10.646490097045898 ], [ "▁opinion", -10.646628379821777 ], [ "▁appears", -10.646702766418457 ], [ "▁count", -10.646795272827148 ], [ "irea", -10.646981239318848 ], [ "ban", -10.647504806518555 ], [ "▁45", -10.647530555725098 ], [ "▁contain", -10.647661209106445 ], [ "ost", -10.647663116455078 ], [ "▁anul", -10.647706031799316 ], [ "rien", -10.648159980773926 ], [ "gra", -10.648360252380371 ], [ "▁counter", -10.648946762084961 ], [ "-3", -10.650411605834961 ], [ "▁resource", -10.650463104248047 ], [ "▁Wo", -10.6505126953125 ], [ "▁posts", -10.650618553161621 ], [ "▁employee", -10.651320457458496 ], [ "rol", -10.651863098144531 ], [ "▁ended", -10.651969909667969 ], [ "met", -10.653080940246582 ], [ "▁meine", -10.653165817260742 ], [ "▁reached", -10.653368949890137 ], [ "gri", -10.653716087341309 ], [ "▁Bra", -10.65374755859375 ], [ "▁conduct", -10.654294967651367 ], [ "▁housing", -10.654422760009766 ], [ "▁tickets", -10.654792785644531 ], [ "▁database", -10.655674934387207 ], [ "IL", -10.656150817871094 ], [ "▁perspective", -10.656359672546387 ], [ "▁Har", -10.656404495239258 ], [ "▁error", -10.656549453735352 ], [ "▁meal", -10.656569480895996 ], [ "▁hearing", -10.657238006591797 ], [ "▁transition", -10.657302856445312 ], [ "▁browser", -10.657609939575195 ], [ "▁supported", -10.657609939575195 ], [ "▁starts", -10.658814430236816 ], [ "țe", -10.658902168273926 ], [ "▁adults", -10.658905029296875 ], [ "▁România", -10.65917682647705 ], [ "dra", -10.659884452819824 ], [ "▁worry", -10.660222053527832 ], [ "▁avoir", -10.660497665405273 ], [ "▁regional", -10.660507202148438 ], [ "▁min", -10.660722732543945 ], [ "▁Does", -10.660806655883789 ], [ "▁Keep", -10.661200523376465 ], [ "rom", -10.661237716674805 ], [ "sco", -10.661320686340332 ], [ "tem", -10.661898612976074 ], [ "▁Old", -10.661954879760742 ], [ "▁Under", -10.662552833557129 ], [ "▁Commission", -10.662557601928711 ], [ "▁Bau", -10.6632661819458 ], [ "▁News", -10.663358688354492 ], [ "▁mois", -10.663444519042969 ], [ "▁respond", -10.66356372833252 ], [ "▁alles", -10.663878440856934 ], [ "▁chair", -10.664475440979004 ], [ "▁ho", -10.664854049682617 ], [ "right", -10.664908409118652 ], [ "▁totally", -10.665532112121582 ], [ "gle", -10.665534973144531 ], [ "▁32", -10.665604591369629 ], [ "66", -10.665664672851562 ], [ "town", -10.665902137756348 ], [ "Ch", -10.666261672973633 ], [ "▁gr", -10.66629695892334 ], [ "▁garage", -10.666328430175781 ], [ "ții", -10.666495323181152 ], [ "▁Union", -10.667136192321777 ], [ "ică", -10.667343139648438 ], [ "▁2,", -10.668437004089355 ], [ "▁reflect", -10.669163703918457 ], [ "▁retail", -10.669388771057129 ], [ "▁unde", -10.669605255126953 ], [ "▁accessible", -10.670262336730957 ], [ "water", -10.67059326171875 ], [ "▁regard", -10.670710563659668 ], [ "▁logo", -10.671489715576172 ], [ "▁inspired", -10.671518325805664 ], [ "▁Wall", -10.671859741210938 ], [ "▁Ste", -10.672093391418457 ], [ "▁asking", -10.672179222106934 ], [ "▁Journal", -10.673028945922852 ], [ "▁Teil", -10.674042701721191 ], [ "▁collaboration", -10.674185752868652 ], [ "▁acid", -10.674266815185547 ], [ "▁Fund", -10.674382209777832 ], [ "▁spirit", -10.6744384765625 ], [ "despite", -10.674457550048828 ], [ "▁delivered", -10.674821853637695 ], [ "▁girls", -10.675374984741211 ], [ "▁Look", -10.675896644592285 ], [ "rant", -10.675949096679688 ], [ "▁District", 
-10.676460266113281 ], [ "▁rental", -10.676709175109863 ], [ "▁spune", -10.676733016967773 ], [ "els", -10.677544593811035 ], [ "▁permanent", -10.677659034729004 ], [ "▁iron", -10.677709579467773 ], [ "▁Thomas", -10.677745819091797 ], [ "EL", -10.678071022033691 ], [ "▁except", -10.678074836730957 ], [ "▁catch", -10.678366661071777 ], [ "▁providers", -10.678375244140625 ], [ "▁2006", -10.678435325622559 ], [ "▁chat", -10.679931640625 ], [ "▁emergency", -10.680281639099121 ], [ "gre", -10.68030834197998 ], [ "site", -10.680888175964355 ], [ "▁missing", -10.68089485168457 ], [ "abil", -10.680914878845215 ], [ "▁Hill", -10.68099594116211 ], [ "urs", -10.681312561035156 ], [ "▁plusieurs", -10.681716918945312 ], [ "▁birthday", -10.681726455688477 ], [ "DS", -10.682019233703613 ], [ "ersten", -10.682381629943848 ], [ "▁5.", -10.68252944946289 ], [ "▁library", -10.68333911895752 ], [ "▁earth", -10.683515548706055 ], [ "CI", -10.683645248413086 ], [ "▁lighting", -10.684442520141602 ], [ "▁fixed", -10.684879302978516 ], [ "tori", -10.684891700744629 ], [ "▁replace", -10.684995651245117 ], [ "▁administration", -10.685074806213379 ], [ "leurs", -10.685229301452637 ], [ "▁meat", -10.686142921447754 ], [ "▁songs", -10.686662673950195 ], [ "▁confirm", -10.686866760253906 ], [ "▁rapid", -10.68698787689209 ], [ "▁Special", -10.686995506286621 ], [ "▁holding", -10.687115669250488 ], [ "▁honor", -10.687271118164062 ], [ "▁Market", -10.687409400939941 ], [ "La", -10.687535285949707 ], [ "▁measure", -10.687760353088379 ], [ "▁guarantee", -10.68785572052002 ], [ "▁switch", -10.68813419342041 ], [ "▁extensive", -10.688294410705566 ], [ "▁Neu", -10.688674926757812 ], [ "avez", -10.688901901245117 ], [ "▁protein", -10.688984870910645 ], [ "▁infrastructure", -10.689454078674316 ], [ "▁functions", -10.689494132995605 ], [ "▁cont", -10.689496040344238 ], [ "row", -10.689760208129883 ], [ "star", -10.689773559570312 ], [ "▁Port", -10.690192222595215 ], [ "Using", -10.690336227416992 ], [ "▁faster", -10.690557479858398 ], [ "44", -10.691168785095215 ], [ "▁measures", -10.691615104675293 ], [ "▁celor", -10.69186019897461 ], [ "▁exam", -10.69189739227295 ], [ "200", -10.69202995300293 ], [ "î", -10.692545890808105 ], [ "▁conversation", -10.692832946777344 ], [ "▁brands", -10.692959785461426 ], [ "▁Code", -10.69359016418457 ], [ "▁Website", -10.693748474121094 ], [ "OS", -10.693782806396484 ], [ "▁alors", -10.693822860717773 ], [ "▁organ", -10.694032669067383 ], [ "▁removed", -10.694823265075684 ], [ "▁Head", -10.694905281066895 ], [ "▁Cha", -10.694908142089844 ], [ "▁visiting", -10.694928169250488 ], [ "▁wild", -10.694928169250488 ], [ "▁seit", -10.694962501525879 ], [ "49", -10.695109367370605 ], [ "▁organic", -10.69539737701416 ], [ "aţi", -10.695775032043457 ], [ "▁kit", -10.695947647094727 ], [ "68", -10.695959091186523 ], [ "▁flowers", -10.696124076843262 ], [ "▁appreciate", -10.697006225585938 ], [ "▁dead", -10.697439193725586 ], [ "▁Fire", -10.697539329528809 ], [ "▁cela", -10.697591781616211 ], [ "▁Ph", -10.697633743286133 ], [ "▁arrive", -10.697921752929688 ], [ "▁purposes", -10.698213577270508 ], [ "▁qualité", -10.698226928710938 ], [ "▁restaurants", -10.698478698730469 ], [ "▁advertising", -10.698541641235352 ], [ "cur", -10.69855785369873 ], [ "▁ça", -10.698973655700684 ], [ "▁introduced", -10.699088096618652 ], [ "▁returned", -10.699111938476562 ], [ "▁desire", -10.699511528015137 ], [ "▁soul", -10.699983596801758 ], [ "▁Technology", -10.699994087219238 ], [ ");", -10.700163841247559 ], [ "▁Royal", 
-10.700282096862793 ], [ "tant", -10.70068645477295 ], [ "▁possibly", -10.700702667236328 ], [ "▁consumers", -10.700812339782715 ], [ "▁doua", -10.70097541809082 ], [ "ified", -10.70097827911377 ], [ "▁Award", -10.70114803314209 ], [ "toutes", -10.70130443572998 ], [ "▁meant", -10.701325416564941 ], [ "ezi", -10.701616287231445 ], [ "▁plu", -10.701766014099121 ], [ "ţii", -10.7021484375 ], [ "▁talent", -10.702789306640625 ], [ "▁Security", -10.703309059143066 ], [ "arii", -10.703352928161621 ], [ "▁zi", -10.703455924987793 ], [ "▁Shop", -10.703667640686035 ], [ "▁breakfast", -10.704107284545898 ], [ "▁trial", -10.704485893249512 ], [ "ami", -10.704936981201172 ], [ "▁register", -10.705301284790039 ], [ "unserer", -10.705646514892578 ], [ "▁solar", -10.705697059631348 ], [ "▁deals", -10.70591926574707 ], [ "▁Ku", -10.7059326171875 ], [ "To", -10.706186294555664 ], [ "bat", -10.70680046081543 ], [ "MC", -10.707010269165039 ], [ "▁Global", -10.707018852233887 ], [ "у", -10.707405090332031 ], [ "▁nor", -10.707818984985352 ], [ "▁milk", -10.707868576049805 ], [ "▁choices", -10.708206176757812 ], [ "»", -10.7086763381958 ], [ "▁Sur", -10.708695411682129 ], [ "more", -10.708739280700684 ], [ "48", -10.709024429321289 ], [ "67", -10.709375381469727 ], [ "▁replacement", -10.709942817687988 ], [ "34", -10.710440635681152 ], [ "▁chocolate", -10.710485458374023 ], [ "▁Family", -10.71059513092041 ], [ "This", -10.71122932434082 ], [ "▁novel", -10.711435317993164 ], [ "▁Chicago", -10.711563110351562 ], [ "▁participate", -10.71166706085205 ], [ "▁trei", -10.712727546691895 ], [ "▁monthly", -10.713729858398438 ], [ "▁survey", -10.713977813720703 ], [ "▁End", -10.714285850524902 ], [ "▁Medical", -10.71442699432373 ], [ "autres", -10.714678764343262 ], [ "rich", -10.714698791503906 ], [ "▁bike", -10.714703559875488 ], [ "▁eventually", -10.714717864990234 ], [ "▁HD", -10.714722633361816 ], [ "bil", -10.714744567871094 ], [ "cent", -10.714902877807617 ], [ "▁afin", -10.715676307678223 ], [ "▁surgery", -10.716160774230957 ], [ "▁sin", -10.716455459594727 ], [ "▁manufacturing", -10.716955184936523 ], [ "▁consumer", -10.717245101928711 ], [ "system", -10.717306137084961 ], [ "▁object", -10.717400550842285 ], [ "▁Ju", -10.717422485351562 ], [ "ered", -10.7178373336792 ], [ "rac", -10.718070030212402 ], [ "▁clinical", -10.718664169311523 ], [ "▁dollars", -10.719761848449707 ], [ "▁chain", -10.71994686126709 ], [ "▁afternoon", -10.720196723937988 ], [ "▁ligne", -10.720422744750977 ], [ "▁accounts", -10.721806526184082 ], [ "ving", -10.722037315368652 ], [ "▁Australian", -10.72240924835205 ], [ "38", -10.722542762756348 ], [ "▁persoane", -10.72258472442627 ], [ "▁grande", -10.722668647766113 ], [ "▁Report", -10.723472595214844 ], [ "▁revenue", -10.723649024963379 ], [ "▁spre", -10.723760604858398 ], [ "▁cutting", -10.7239990234375 ], [ "▁approved", -10.724133491516113 ], [ "▁glad", -10.724188804626465 ], [ "chaque", -10.724395751953125 ], [ "win", -10.724435806274414 ], [ "▁waren", -10.724733352661133 ], [ "▁launched", -10.725071907043457 ], [ "▁layer", -10.725645065307617 ], [ "▁airport", -10.725716590881348 ], [ "▁effectively", -10.72572135925293 ], [ "▁coach", -10.725946426391602 ], [ "dé", -10.726130485534668 ], [ "LE", -10.72627067565918 ], [ "▁müssen", -10.726386070251465 ], [ "plan", -10.726641654968262 ], [ "dan", -10.726705551147461 ], [ "55", -10.726786613464355 ], [ "bringing", -10.726895332336426 ], [ "▁$2", -10.726995468139648 ], [ "nce", -10.727181434631348 ], [ "▁inspiration", -10.728177070617676 ], [ 
"You", -10.728657722473145 ], [ "▁soll", -10.729095458984375 ], [ "▁seemed", -10.729595184326172 ], [ "▁flight", -10.729687690734863 ], [ "▁prima", -10.729883193969727 ], [ "▁Welt", -10.730123519897461 ], [ "▁jetzt", -10.730315208435059 ], [ "ky", -10.730428695678711 ], [ "▁Western", -10.73054027557373 ], [ "▁label", -10.730600357055664 ], [ "▁möglich", -10.73081111907959 ], [ "▁input", -10.730862617492676 ], [ "▁laws", -10.730995178222656 ], [ "▁personnes", -10.731708526611328 ], [ "▁paying", -10.731731414794922 ], [ "▁Uhr", -10.73173713684082 ], [ "▁Mary", -10.731745719909668 ], [ "pur", -10.73190689086914 ], [ "▁covers", -10.732133865356445 ], [ "▁throw", -10.732522964477539 ], [ "▁Tor", -10.733281135559082 ], [ "▁bat", -10.73355484008789 ], [ "▁Gr", -10.73373031616211 ], [ "▁farm", -10.73376178741455 ], [ "▁improved", -10.733843803405762 ], [ "▁fără", -10.734286308288574 ], [ "▁theme", -10.73437213897705 ], [ "pens", -10.734865188598633 ], [ "▁Cup", -10.734975814819336 ], [ "▁settings", -10.735114097595215 ], [ "▁hire", -10.735234260559082 ], [ "▁massive", -10.735248565673828 ], [ "▁generate", -10.735405921936035 ], [ "▁earn", -10.735837936401367 ], [ "▁tab", -10.736431121826172 ], [ "For", -10.736616134643555 ], [ "gang", -10.736891746520996 ], [ "▁hin", -10.73709487915039 ], [ "▁roll", -10.737113952636719 ], [ "▁engagement", -10.737157821655273 ], [ "▁signed", -10.737177848815918 ], [ "▁League", -10.737323760986328 ], [ "▁registration", -10.737931251525879 ], [ "▁première", -10.738763809204102 ], [ "isse", -10.73896598815918 ], [ "▁university", -10.739027976989746 ], [ "ell", -10.739157676696777 ], [ "▁nou", -10.739169120788574 ], [ "rog", -10.739191055297852 ], [ "▁sitting", -10.739206314086914 ], [ "▁cazul", -10.739571571350098 ], [ "▁surrounding", -10.73983383178711 ], [ "▁Asia", -10.740357398986816 ], [ "▁bath", -10.740825653076172 ], [ "hal", -10.740923881530762 ], [ "▁plate", -10.741026878356934 ], [ "▁tests", -10.741151809692383 ], [ "▁presentation", -10.741156578063965 ], [ "▁chicken", -10.741501808166504 ], [ "▁Val", -10.741586685180664 ], [ "ably", -10.74166488647461 ], [ "▁magazine", -10.741697311401367 ], [ "▁Maybe", -10.74187183380127 ], [ "▁sauce", -10.742673873901367 ], [ "TC", -10.742887496948242 ], [ "▁exclusive", -10.74296760559082 ], [ "86", -10.74306869506836 ], [ "▁teeth", -10.743474960327148 ], [ "▁regularly", -10.743524551391602 ], [ "sed", -10.743824005126953 ], [ "gro", -10.744174003601074 ], [ "He", -10.744211196899414 ], [ "▁2017.", -10.744302749633789 ], [ "▁template", -10.74489688873291 ], [ "▁gleich", -10.744938850402832 ], [ "bal", -10.745061874389648 ], [ "▁African", -10.74511432647705 ], [ "în", -10.745231628417969 ], [ "▁rep", -10.74543571472168 ], [ "▁beat", -10.74588394165039 ], [ "▁deck", -10.746064186096191 ], [ "▁intended", -10.746221542358398 ], [ "▁para", -10.746513366699219 ], [ "▁IP", -10.746712684631348 ], [ "▁bra", -10.746881484985352 ], [ "▁forces", -10.746966361999512 ], [ "▁routine", -10.747184753417969 ], [ "▁Jahre", -10.747758865356445 ], [ "▁Bad", -10.74797534942627 ], [ "▁drivers", -10.748074531555176 ], [ "▁updates", -10.748095512390137 ], [ "▁elegant", -10.748279571533203 ], [ "▁external", -10.748444557189941 ], [ "▁engineering", -10.748819351196289 ], [ "ender", -10.749544143676758 ], [ "table", -10.749755859375 ], [ "inter", -10.749878883361816 ], [ "▁Romania", -10.749948501586914 ], [ "▁zile", -10.750468254089355 ], [ "▁luxury", -10.750570297241211 ], [ "▁calling", -10.750750541687012 ], [ "▁cooking", -10.75101375579834 ], [ 
"▁component", -10.75114631652832 ], [ "wan", -10.75121021270752 ], [ "schen", -10.751212120056152 ], [ "▁birth", -10.751242637634277 ], [ "asupra", -10.751349449157715 ], [ "Co", -10.751471519470215 ], [ "▁opt", -10.75153923034668 ], [ "▁discovered", -10.751860618591309 ], [ "▁teach", -10.752084732055664 ], [ "▁Son", -10.75234317779541 ], [ "▁guest", -10.752384185791016 ], [ "▁dogs", -10.752695083618164 ], [ "▁2003", -10.752745628356934 ], [ "▁behavior", -10.752750396728516 ], [ "pé", -10.7529935836792 ], [ "63", -10.75316333770752 ], [ "▁Human", -10.753702163696289 ], [ "▁expression", -10.754800796508789 ], [ "▁nevoie", -10.754936218261719 ], [ "▁recherche", -10.75528621673584 ], [ "ging", -10.755767822265625 ], [ "related", -10.755948066711426 ], [ "▁discount", -10.756040573120117 ], [ "▁Brown", -10.756054878234863 ], [ "▁Such", -10.756107330322266 ], [ "▁Ve", -10.757149696350098 ], [ "▁height", -10.757265090942383 ], [ "clo", -10.757414817810059 ], [ "▁incredible", -10.757912635803223 ], [ "▁bas", -10.757916450500488 ], [ "▁mă", -10.75798225402832 ], [ "▁purchased", -10.758240699768066 ], [ "▁compte", -10.75831127166748 ], [ "▁instructions", -10.758537292480469 ], [ "▁Instead", -10.75866985321045 ], [ "▁output", -10.758706092834473 ], [ "▁mom", -10.758886337280273 ], [ "DR", -10.759828567504883 ], [ "89", -10.760168075561523 ], [ "▁reduced", -10.760621070861816 ], [ "98", -10.7606840133667 ], [ "▁constant", -10.760879516601562 ], [ "▁therapy", -10.762417793273926 ], [ "▁capable", -10.762757301330566 ], [ "mark", -10.763265609741211 ], [ "▁Sometimes", -10.76332950592041 ], [ "▁joy", -10.763419151306152 ], [ "▁perfectly", -10.763589859008789 ], [ "▁painting", -10.763704299926758 ], [ "avait", -10.763765335083008 ], [ "▁Sha", -10.764384269714355 ], [ "▁dat", -10.764463424682617 ], [ "▁produits", -10.764479637145996 ], [ "tric", -10.76456356048584 ], [ "ierte", -10.765153884887695 ], [ "▁Smith", -10.765836715698242 ], [ "▁trebui", -10.766264915466309 ], [ "▁beaucoup", -10.766630172729492 ], [ "▁chosen", -10.767189025878906 ], [ "▁cre", -10.76732063293457 ], [ "▁complet", -10.767341613769531 ], [ "▁Ltd", -10.767599105834961 ], [ "▁recovery", -10.76781940460205 ], [ "▁district", -10.768423080444336 ], [ "78", -10.768640518188477 ], [ "▁Unter", -10.76872730255127 ], [ "▁schnell", -10.768729209899902 ], [ "▁apart", -10.768943786621094 ], [ "▁phase", -10.76894760131836 ], [ "▁seeking", -10.769091606140137 ], [ "▁mark", -10.769148826599121 ], [ "▁pet", -10.769233703613281 ], [ "▁PDF", -10.769296646118164 ], [ "▁efficiency", -10.769577980041504 ], [ "▁buildings", -10.769611358642578 ], [ "69", -10.769723892211914 ], [ "▁sens", -10.769858360290527 ], [ "▁Video", -10.770115852355957 ], [ "▁destination", -10.770181655883789 ], [ "▁female", -10.770319938659668 ], [ "▁supporting", -10.770674705505371 ], [ "▁signs", -10.77077865600586 ], [ "▁appeal", -10.770784378051758 ], [ "76", -10.77110481262207 ], [ "▁favourite", -10.771612167358398 ], [ "ock", -10.771702766418457 ], [ "▁readers", -10.771757125854492 ], [ "▁Did", -10.771868705749512 ], [ "rou", -10.772045135498047 ], [ "PA", -10.77222728729248 ], [ "▁Jean", -10.772480964660645 ], [ "▁Em", -10.772586822509766 ], [ "pass", -10.77280330657959 ], [ "▁Zi", -10.773090362548828 ], [ "▁între", -10.773261070251465 ], [ "▁fly", -10.773427963256836 ], [ "mos", -10.773666381835938 ], [ "▁emotional", -10.773860931396484 ], [ "asse", -10.774768829345703 ], [ "▁sessions", -10.775086402893066 ], [ "▁symptoms", -10.77564811706543 ], [ "▁died", -10.776217460632324 
], [ "▁seconds", -10.776628494262695 ], [ "▁procedure", -10.777206420898438 ], [ "▁express", -10.777420997619629 ], [ "▁două", -10.777885437011719 ], [ "▁valid", -10.778393745422363 ], [ "▁euro", -10.7788667678833 ], [ "▁interests", -10.779032707214355 ], [ "Having", -10.779237747192383 ], [ "▁hundreds", -10.779669761657715 ], [ "grad", -10.780023574829102 ], [ "▁neuen", -10.780084609985352 ], [ "▁cook", -10.780552864074707 ], [ "▁pur", -10.780834197998047 ], [ "▁charges", -10.781024932861328 ], [ "sche", -10.78118896484375 ], [ "▁smile", -10.781468391418457 ], [ "▁festival", -10.781611442565918 ], [ "cho", -10.781672477722168 ], [ "▁£", -10.781937599182129 ], [ "cht", -10.78201675415039 ], [ "▁macht", -10.782021522521973 ], [ "▁Wasser", -10.782028198242188 ], [ "▁Cap", -10.78226375579834 ], [ "▁Learn", -10.78274154663086 ], [ "▁load", -10.783162117004395 ], [ "▁aici", -10.783225059509277 ], [ "▁Ch", -10.784143447875977 ], [ "▁cycle", -10.784223556518555 ], [ "▁carried", -10.784337997436523 ], [ "▁jusqu", -10.784517288208008 ], [ "stein", -10.78505802154541 ], [ "ski", -10.78513240814209 ], [ "cap", -10.78579330444336 ], [ "▁Bal", -10.785852432250977 ], [ "▁minor", -10.786053657531738 ], [ "77", -10.786175727844238 ], [ "▁considering", -10.78632640838623 ], [ "innen", -10.78644847869873 ], [ "▁greatest", -10.787055015563965 ], [ "▁Training", -10.787137031555176 ], [ "08", -10.787307739257812 ], [ "▁significantly", -10.787607192993164 ], [ "gé", -10.787728309631348 ], [ "▁dumpster", -10.788351058959961 ], [ "▁allem", -10.788930892944336 ], [ "▁bonus", -10.7889404296875 ], [ "▁guy", -10.789036750793457 ], [ "fel", -10.78904914855957 ], [ "▁lifestyle", -10.789241790771484 ], [ "▁Bro", -10.78961181640625 ], [ "▁implement", -10.789687156677246 ], [ "lock", -10.790046691894531 ], [ "▁Earth", -10.790142059326172 ], [ "kar", -10.790733337402344 ], [ "▁invest", -10.790833473205566 ], [ "▁river", -10.790933609008789 ], [ "▁accurate", -10.791494369506836 ], [ "▁mu", -10.791579246520996 ], [ "▁celebrate", -10.792119979858398 ], [ "▁ran", -10.79256820678711 ], [ "▁bigger", -10.792988777160645 ], [ "▁Mer", -10.793476104736328 ], [ "▁millions", -10.793486595153809 ], [ "▁partie", -10.793563842773438 ], [ "▁dazu", -10.793951988220215 ], [ "▁Full", -10.794130325317383 ], [ "gie", -10.794207572937012 ], [ "bot", -10.794373512268066 ], [ "roll", -10.79472827911377 ], [ "▁Women", -10.795303344726562 ], [ "▁compare", -10.796135902404785 ], [ "▁van", -10.796503067016602 ], [ "▁apps", -10.796521186828613 ], [ "PC", -10.797050476074219 ], [ "▁drei", -10.79736042022705 ], [ "▁maison", -10.797588348388672 ], [ "▁knows", -10.797712326049805 ], [ "rid", -10.797972679138184 ], [ "62", -10.798396110534668 ], [ "class", -10.798508644104004 ], [ "▁chez", -10.798669815063477 ], [ "char", -10.798828125 ], [ "88", -10.798989295959473 ], [ "▁cast", -10.79948902130127 ], [ "▁examples", -10.79973030090332 ], [ "▁Therefore", -10.799823760986328 ], [ "▁topics", -10.799941062927246 ], [ "with", -10.80013656616211 ], [ "▁Anti", -10.800555229187012 ], [ "how", -10.800620079040527 ], [ "▁whom", -10.80094051361084 ], [ "▁Deutschland", -10.801124572753906 ], [ "tine", -10.80113697052002 ], [ "▁CEO", -10.801224708557129 ], [ "▁truck", -10.801350593566895 ], [ "▁Which", -10.8015718460083 ], [ "erie", -10.802017211914062 ], [ "fect", -10.802069664001465 ], [ "bou", -10.8026762008667 ], [ "▁(1", -10.802818298339844 ], [ "sum", -10.802980422973633 ], [ "▁bonne", -10.803068161010742 ], [ "▁remaining", -10.80321216583252 ], [ "▁equal", 
-10.803543090820312 ], [ "▁engage", -10.803561210632324 ], [ "▁RE", -10.803849220275879 ], [ "style", -10.804182052612305 ], [ "▁urma", -10.804337501525879 ], [ "▁Grund", -10.80496883392334 ], [ "ür", -10.8051176071167 ], [ "▁font", -10.805353164672852 ], [ "▁assets", -10.805916786193848 ], [ "AL", -10.806102752685547 ], [ "▁rear", -10.80635929107666 ], [ "▁contemporary", -10.80646800994873 ], [ "▁occur", -10.8067045211792 ], [ "rated", -10.806941986083984 ], [ "▁tight", -10.807088851928711 ], [ "▁machines", -10.807921409606934 ], [ "▁0.", -10.808456420898438 ], [ "▁Aber", -10.808470726013184 ], [ "sol", -10.808517456054688 ], [ "rü", -10.80858039855957 ], [ "▁2007", -10.809479713439941 ], [ "gg", -10.809488296508789 ], [ "▁unul", -10.809691429138184 ], [ "▁était", -10.809908866882324 ], [ "▁capture", -10.809980392456055 ], [ "▁command", -10.810037612915039 ], [ "▁wire", -10.810425758361816 ], [ "▁shift", -10.810762405395508 ], [ "▁bread", -10.81084156036377 ], [ "▁causes", -10.810937881469727 ], [ "PI", -10.810938835144043 ], [ "SC", -10.811086654663086 ], [ "▁lights", -10.811190605163574 ], [ "▁lived", -10.811293601989746 ], [ "mul", -10.811446189880371 ], [ "▁Cur", -10.811917304992676 ], [ "▁Richard", -10.811973571777344 ], [ "37", -10.812638282775879 ], [ "▁cup", -10.812737464904785 ], [ "▁fields", -10.812983512878418 ], [ "▁crusher", -10.813389778137207 ], [ "65", -10.813774108886719 ], [ "avons", -10.813822746276855 ], [ "▁gear", -10.813835144042969 ], [ "▁standing", -10.813844680786133 ], [ "▁thick", -10.81445026397705 ], [ "aff", -10.815132141113281 ], [ "ments", -10.815434455871582 ], [ "▁conflict", -10.815728187561035 ], [ "ität", -10.815825462341309 ], [ "▁worse", -10.816295623779297 ], [ "SE", -10.816332817077637 ], [ "imi", -10.816459655761719 ], [ "▁dating", -10.817033767700195 ], [ "Do", -10.817073822021484 ], [ "▁flexible", -10.817093849182129 ], [ "ologie", -10.817131996154785 ], [ "SU", -10.817200660705566 ], [ "▁contribute", -10.817306518554688 ], [ "▁denn", -10.817428588867188 ], [ "▁appointment", -10.81746768951416 ], [ "▁ticket", -10.817523002624512 ], [ "bed", -10.817892074584961 ], [ "▁2019.", -10.817936897277832 ], [ "▁tasks", -10.81871223449707 ], [ "▁carbon", -10.818734169006348 ], [ "▁situations", -10.819400787353516 ], [ "MA", -10.819402694702148 ], [ "▁portion", -10.819498062133789 ], [ "▁urban", -10.819585800170898 ], [ "▁Canadian", -10.819805145263672 ], [ "▁Bur", -10.819937705993652 ], [ "▁pack", -10.81995964050293 ], [ "▁effet", -10.819992065429688 ], [ "▁Ball", -10.82008171081543 ], [ "▁timpul", -10.82014274597168 ], [ "▁owned", -10.820211410522461 ], [ "▁surprise", -10.820413589477539 ], [ "▁Mu", -10.820582389831543 ], [ "▁decades", -10.821001052856445 ], [ "▁affected", -10.821728706359863 ], [ "▁proven", -10.821732521057129 ], [ "▁Fe", -10.821990966796875 ], [ "zy", -10.822042465209961 ], [ "42", -10.822175979614258 ], [ "▁trend", -10.8223876953125 ], [ "▁autres", -10.82262897491455 ], [ "No", -10.823028564453125 ], [ "▁nine", -10.823565483093262 ], [ "ON", -10.82376480102539 ], [ "NE", -10.823953628540039 ], [ "oli", -10.824359893798828 ], [ "▁Daniel", -10.824434280395508 ], [ "▁spa", -10.824939727783203 ], [ "▁messages", -10.825084686279297 ], [ "PS", -10.825183868408203 ], [ "47", -10.825703620910645 ], [ "▁doch", -10.826032638549805 ], [ "▁improvement", -10.826187133789062 ], [ "▁mountain", -10.826350212097168 ], [ "▁Room", -10.826451301574707 ], [ "▁edition", -10.826546669006348 ], [ "▁musical", -10.826712608337402 ], [ "CP", -10.827024459838867 ], 
[ "▁Mill", -10.827027320861816 ], [ "▁steht", -10.827740669250488 ], [ "▁determined", -10.828083038330078 ], [ "you", -10.828392028808594 ], [ "weg", -10.828554153442383 ], [ "▁Digital", -10.828624725341797 ], [ "▁filter", -10.828903198242188 ], [ "▁youth", -10.829047203063965 ], [ "▁assessment", -10.829301834106445 ], [ "▁butter", -10.829370498657227 ], [ "▁Watch", -10.829427719116211 ], [ "▁zusammen", -10.829471588134766 ], [ "▁View", -10.829606056213379 ], [ "09", -10.829649925231934 ], [ "▁sole", -10.829816818237305 ], [ ".00", -10.830018997192383 ], [ "33", -10.83015251159668 ], [ "▁export", -10.830229759216309 ], [ "ery", -10.830373764038086 ], [ "▁zurück", -10.830426216125488 ], [ "▁walls", -10.83048152923584 ], [ "▁recognize", -10.8306884765625 ], [ "law", -10.830801963806152 ], [ "▁parent", -10.830863952636719 ], [ "ST", -10.831357955932617 ], [ "▁description", -10.831669807434082 ], [ "MS", -10.831887245178223 ], [ "SM", -10.83189582824707 ], [ "▁Finally", -10.831940650939941 ], [ "▁hardware", -10.831965446472168 ], [ "ident", -10.832464218139648 ], [ "▁brown", -10.832566261291504 ], [ "▁kinds", -10.832950592041016 ], [ "▁Arts", -10.83297061920166 ], [ "▁concert", -10.83341121673584 ], [ "▁sec", -10.83342456817627 ], [ "▁represent", -10.833512306213379 ], [ "▁institutions", -10.833597183227539 ], [ "▁fur", -10.833998680114746 ], [ "▁Support", -10.83403205871582 ], [ "87", -10.834076881408691 ], [ "▁ease", -10.834178924560547 ], [ "▁feels", -10.834218978881836 ], [ "▁sheet", -10.834342002868652 ], [ "▁Though", -10.83437442779541 ], [ "▁propose", -10.834381103515625 ], [ "▁personnel", -10.834409713745117 ], [ "bie", -10.834794044494629 ], [ "▁contest", -10.834836959838867 ], [ "▁successfully", -10.835152626037598 ], [ "▁direkt", -10.835397720336914 ], [ "bietet", -10.835597038269043 ], [ "▁submit", -10.835888862609863 ], [ "▁sicher", -10.835919380187988 ], [ "▁Personal", -10.83607006072998 ], [ "94", -10.836341857910156 ], [ "61", -10.836400985717773 ], [ "▁Very", -10.836540222167969 ], [ "bol", -10.836603164672852 ], [ "▁ha", -10.837089538574219 ], [ "▁channel", -10.8372220993042 ], [ "mut", -10.837289810180664 ], [ "▁mouth", -10.837342262268066 ], [ "▁vast", -10.837395668029785 ], [ "▁Ob", -10.837569236755371 ], [ "lit", -10.83763313293457 ], [ "▁poly", -10.837878227233887 ], [ "▁trained", -10.838102340698242 ], [ "▁specialist", -10.838122367858887 ], [ "UL", -10.83822250366211 ], [ "▁seiner", -10.838336944580078 ], [ "SS", -10.838627815246582 ], [ "▁vacation", -10.838672637939453 ], [ "▁resume", -10.839157104492188 ], [ "▁constantly", -10.839717864990234 ], [ "▁treated", -10.83986759185791 ], [ "▁150", -10.840936660766602 ], [ "▁native", -10.841246604919434 ], [ "▁Russian", -10.841329574584961 ], [ "▁patterns", -10.841371536254883 ], [ "▁knowing", -10.841670989990234 ], [ "▁Pan", -10.841682434082031 ], [ "peri", -10.841848373413086 ], [ "aci", -10.841864585876465 ], [ "▁answers", -10.842114448547363 ], [ "▁heute", -10.842985153198242 ], [ "93", -10.843056678771973 ], [ "▁Winter", -10.844083786010742 ], [ "▁yes", -10.844173431396484 ], [ "SP", -10.844185829162598 ], [ "].", -10.844388008117676 ], [ "▁kein", -10.844862937927246 ], [ "▁introduce", -10.8450927734375 ], [ "-4", -10.84555435180664 ], [ "▁shoot", -10.845762252807617 ], [ "AR", -10.84576416015625 ], [ "▁receiving", -10.845864295959473 ], [ "▁intre", -10.84702205657959 ], [ "▁appeared", -10.84708023071289 ], [ "▁brother", -10.847321510314941 ], [ "▁extend", -10.847765922546387 ], [ "▁fara", -10.848737716674805 ], [ 
"▁kommt", -10.848876953125 ], [ "ali", -10.848913192749023 ], [ "▁numai", -10.849047660827637 ], [ "▁scientific", -10.84913158416748 ], [ "▁virtual", -10.849145889282227 ], [ "▁Ac", -10.849513053894043 ], [ "▁procedures", -10.849631309509277 ], [ "▁silver", -10.849821090698242 ], [ "▁leather", -10.849979400634766 ], [ "DA", -10.85014820098877 ], [ "▁executive", -10.850263595581055 ], [ "▁officials", -10.850496292114258 ], [ "▁agencies", -10.850503921508789 ], [ "▁Software", -10.850540161132812 ], [ "▁cor", -10.850690841674805 ], [ "Con", -10.850741386413574 ], [ "▁log", -10.851066589355469 ], [ "ț", -10.851147651672363 ], [ "02", -10.851195335388184 ], [ "▁7.", -10.85245132446289 ], [ "▁accepted", -10.852483749389648 ], [ "▁Berlin", -10.852538108825684 ], [ "ID", -10.852582931518555 ], [ "cot", -10.852788925170898 ], [ "▁employment", -10.852799415588379 ], [ "run", -10.853020668029785 ], [ "▁identified", -10.853178977966309 ], [ "96", -10.853887557983398 ], [ "▁déjà", -10.853944778442383 ], [ "▁cuisine", -10.853952407836914 ], [ "turi", -10.854070663452148 ], [ "▁Japanese", -10.854316711425781 ], [ "▁golf", -10.854514122009277 ], [ "▁Ki", -10.854787826538086 ], [ "▁carefully", -10.854863166809082 ], [ "▁remote", -10.854973793029785 ], [ "▁2018,", -10.855148315429688 ], [ "▁sus", -10.855154991149902 ], [ "tique", -10.855293273925781 ], [ "▁residential", -10.855695724487305 ], [ "97", -10.855809211730957 ], [ "▁Spring", -10.855908393859863 ], [ "▁Marketing", -10.856186866760254 ], [ "▁Control", -10.85630989074707 ], [ "var", -10.856344223022461 ], [ "▁historical", -10.8563814163208 ], [ "▁freedom", -10.856423377990723 ], [ "sure", -10.856426239013672 ], [ "▁broken", -10.856796264648438 ], [ "▁criminal", -10.856949806213379 ], [ "▁innovation", -10.857075691223145 ], [ "▁Italian", -10.857192039489746 ], [ "sper", -10.857282638549805 ], [ "▁cake", -10.857653617858887 ], [ "▁candidates", -10.857894897460938 ], [ "▁sizes", -10.858267784118652 ], [ "pel", -10.858366966247559 ], [ "▁frequently", -10.85889720916748 ], [ "▁planet", -10.859138488769531 ], [ "▁writer", -10.859519958496094 ], [ "1,", -10.859569549560547 ], [ "uvent", -10.85959529876709 ], [ "▁awareness", -10.859807968139648 ], [ "name", -10.859954833984375 ], [ "▁Children", -10.859980583190918 ], [ "▁relatively", -10.860311508178711 ], [ "▁pu", -10.860321998596191 ], [ "▁quiet", -10.86038875579834 ], [ "▁planned", -10.860716819763184 ], [ "▁election", -10.861419677734375 ], [ "▁6.", -10.861761093139648 ], [ "▁broad", -10.861772537231445 ], [ "▁skill", -10.861835479736328 ], [ "▁reasonable", -10.862037658691406 ], [ "▁Fort", -10.862283706665039 ], [ "▁aceea", -10.862407684326172 ], [ "▁arrived", -10.86263370513916 ], [ "▁payments", -10.862680435180664 ], [ "ack", -10.862700462341309 ], [ "▁Ort", -10.863354682922363 ], [ "▁investors", -10.863364219665527 ], [ "▁operate", -10.86351203918457 ], [ "ME", -10.863556861877441 ], [ "dic", -10.863683700561523 ], [ "▁foods", -10.863731384277344 ], [ "▁stick", -10.863831520080566 ], [ "▁agents", -10.86412525177002 ], [ "▁crowd", -10.864175796508789 ], [ "▁Students", -10.864480972290039 ], [ "▁concerned", -10.864609718322754 ], [ "test", -10.864740371704102 ], [ "▁designer", -10.865334510803223 ], [ "▁Conference", -10.865593910217285 ], [ "▁saving", -10.866105079650879 ], [ "▁recorded", -10.866422653198242 ], [ "▁proposed", -10.866564750671387 ], [ "▁ship", -10.86657428741455 ], [ "▁cred", -10.867274284362793 ], [ "▁Ci", -10.867440223693848 ], [ "RE", -10.867619514465332 ], [ "▁tradition", 
-10.867753982543945 ], [ "▁worldwide", -10.867779731750488 ], [ "64", -10.867944717407227 ], [ "▁television", -10.867989540100098 ], [ "▁projet", -10.868102073669434 ], [ "ency", -10.868487358093262 ], [ "▁struggle", -10.868514060974121 ], [ "▁twice", -10.868955612182617 ], [ "▁Off", -10.869234085083008 ], [ "▁begins", -10.869577407836914 ], [ "key", -10.869794845581055 ], [ "▁Table", -10.869963645935059 ], [ "▁demande", -10.870177268981934 ], [ "▁liquid", -10.870441436767578 ], [ "meter", -10.870684623718262 ], [ "▁2001", -10.871190071105957 ], [ "▁willing", -10.871660232543945 ], [ "▁medicine", -10.871707916259766 ], [ "▁expand", -10.871747970581055 ], [ "▁2004", -10.871804237365723 ], [ "▁2002", -10.872016906738281 ], [ "▁accord", -10.872292518615723 ], [ "▁Chris", -10.872446060180664 ], [ "▁prove", -10.872543334960938 ], [ "ston", -10.872740745544434 ], [ "mettre", -10.872800827026367 ], [ "▁moments", -10.873537063598633 ], [ "tik", -10.87368392944336 ], [ "such", -10.874055862426758 ], [ "2.", -10.874431610107422 ], [ "▁UN", -10.874561309814453 ], [ "▁jump", -10.874737739562988 ], [ "▁dish", -10.87539291381836 ], [ "▁Key", -10.875663757324219 ], [ "▁challenging", -10.875975608825684 ], [ "▁domestic", -10.876410484313965 ], [ "▁impressive", -10.876752853393555 ], [ "iger", -10.877022743225098 ], [ "▁Ram", -10.877157211303711 ], [ "▁doit", -10.877263069152832 ], [ "▁concrete", -10.87734317779541 ], [ "▁Unternehmen", -10.877397537231445 ], [ "▁LED", -10.877429008483887 ], [ "▁trouver", -10.877533912658691 ], [ "▁fundamental", -10.877875328063965 ], [ "▁implementation", -10.878121376037598 ], [ "85", -10.878247261047363 ], [ "▁hosting", -10.87856388092041 ], [ "▁Game", -10.878691673278809 ], [ "▁taught", -10.878981590270996 ], [ "tung", -10.879016876220703 ], [ "ront", -10.87940502166748 ], [ "▁shoes", -10.879639625549316 ], [ "79", -10.8797607421875 ], [ "▁stunning", -10.879778861999512 ], [ "▁Congress", -10.880142211914062 ], [ "▁Ent", -10.880278587341309 ], [ "▁Wer", -10.880607604980469 ], [ "▁alt", -10.880608558654785 ], [ "ör", -10.880699157714844 ], [ "▁calm", -10.8808012008667 ], [ "46", -10.881132125854492 ], [ "▁Daca", -10.881404876708984 ], [ "71", -10.881938934326172 ], [ "▁Dec", -10.882392883300781 ], [ "▁Fo", -10.882437705993652 ], [ "▁defense", -10.88313102722168 ], [ "▁expectations", -10.883166313171387 ], [ "▁Alle", -10.88318920135498 ], [ "▁brief", -10.883691787719727 ], [ "▁Hospital", -10.883975982666016 ], [ "▁sides", -10.884121894836426 ], [ "▁yellow", -10.884140014648438 ], [ "lei", -10.88451862335205 ], [ "▁speaking", -10.884589195251465 ], [ "▁crucial", -10.885198593139648 ], [ "▁Town", -10.8854341506958 ], [ "▁married", -10.885574340820312 ], [ "▁acesta", -10.885583877563477 ], [ "▁noted", -10.885611534118652 ], [ "▁Word", -10.885659217834473 ], [ "▁conducted", -10.885963439941406 ], [ "▁decor", -10.886249542236328 ], [ "kon", -10.886565208435059 ], [ "▁supplies", -10.8866605758667 ], [ "▁adventure", -10.886691093444824 ], [ "▁exhibition", -10.887163162231445 ], [ "heit", -10.887300491333008 ], [ "▁36", -10.88744831085205 ], [ "eria", -10.887505531311035 ], [ "ines", -10.887551307678223 ], [ "ological", -10.887582778930664 ], [ "quel", -10.88806438446045 ], [ "▁Van", -10.88825511932373 ], [ "-19", -10.88853645324707 ], [ "2,", -10.888566970825195 ], [ "▁Band", -10.888989448547363 ], [ "▁soil", -10.889184951782227 ], [ "▁Tim", -10.889599800109863 ], [ "▁NOT", -10.88968563079834 ], [ "▁pilot", -10.889753341674805 ], [ "▁Sh", -10.889774322509766 ], [ "Ho", 
-10.890361785888672 ], [ "CA", -10.890509605407715 ], [ "▁Eu", -10.890745162963867 ], [ "▁committee", -10.890829086303711 ], [ "▁Store", -10.891075134277344 ], [ "▁joint", -10.89111614227295 ], [ "▁Op", -10.891315460205078 ], [ "▁Jack", -10.891985893249512 ], [ "quality", -10.89216423034668 ], [ "▁Has", -10.892489433288574 ], [ "▁wenig", -10.892507553100586 ], [ "hood", -10.892545700073242 ], [ "▁Class", -10.892582893371582 ], [ "rus", -10.892773628234863 ], [ "▁grown", -10.89294719696045 ], [ "▁About", -10.893518447875977 ], [ "▁sum", -10.893942832946777 ], [ "▁Fair", -10.893946647644043 ], [ "SA", -10.894149780273438 ], [ "92", -10.894185066223145 ], [ "▁fourth", -10.894354820251465 ], [ "▁featured", -10.894384384155273 ], [ "▁Pen", -10.89444637298584 ], [ "▁natürlich", -10.894885063171387 ], [ "ched", -10.894901275634766 ], [ "▁ban", -10.895112991333008 ], [ "anne", -10.89522647857666 ], [ "▁theory", -10.895413398742676 ], [ "bin", -10.895438194274902 ], [ "iers", -10.895819664001465 ], [ "▁strategic", -10.895903587341309 ], [ "▁jours", -10.895956039428711 ], [ "▁communicate", -10.896124839782715 ], [ "▁pin", -10.896320343017578 ], [ "▁Bon", -10.89721393585205 ], [ "kom", -10.897290229797363 ], [ "-5", -10.898177146911621 ], [ "▁degrees", -10.898643493652344 ], [ "▁entertainment", -10.899014472961426 ], [ "ară", -10.899248123168945 ], [ "ales", -10.899425506591797 ], [ "▁pendant", -10.89954662322998 ], [ "▁Series", -10.899575233459473 ], [ "▁holds", -10.899592399597168 ], [ "▁Mini", -10.899828910827637 ], [ "▁Obama", -10.899898529052734 ], [ "▁conform", -10.900163650512695 ], [ "-10", -10.900216102600098 ], [ "▁preparation", -10.9009370803833 ], [ "▁autre", -10.90105152130127 ], [ "▁mortgage", -10.901155471801758 ], [ "▁Kan", -10.901508331298828 ], [ "▁typical", -10.901538848876953 ], [ "01", -10.901711463928223 ], [ "▁Review", -10.901862144470215 ], [ "▁laptop", -10.902127265930176 ], [ "CR", -10.902610778808594 ], [ "▁thread", -10.90265941619873 ], [ "BS", -10.902661323547363 ], [ "▁upper", -10.902700424194336 ], [ "▁searching", -10.902932167053223 ], [ "▁pen", -10.903214454650879 ], [ "▁Middle", -10.90333080291748 ], [ "73", -10.903359413146973 ], [ "▁leg", -10.903650283813477 ], [ "onic", -10.904272079467773 ], [ "IS", -10.904356956481934 ], [ "▁Kar", -10.904623985290527 ], [ "anz", -10.9046630859375 ], [ "▁circuit", -10.904901504516602 ], [ "▁Casino", -10.905384063720703 ], [ "07", -10.90584659576416 ], [ "▁petit", -10.905906677246094 ], [ "TV", -10.905978202819824 ], [ "level", -10.906311988830566 ], [ "▁Point", -10.906312942504883 ], [ "rau", -10.906474113464355 ], [ "▁cabinet", -10.906991958618164 ], [ "▁failed", -10.907042503356934 ], [ "▁stated", -10.907126426696777 ], [ "LA", -10.907461166381836 ], [ "▁privacy", -10.907596588134766 ], [ "vol", -10.907901763916016 ], [ "ativ", -10.908151626586914 ], [ "▁matters", -10.908210754394531 ], [ "▁Mor", -10.908555030822754 ], [ "▁Ur", -10.90860652923584 ], [ "view", -10.908968925476074 ], [ "▁consultation", -10.90921688079834 ], [ "TS", -10.909296989440918 ], [ "▁apartment", -10.909412384033203 ], [ "▁integrated", -10.909425735473633 ], [ "74", -10.909669876098633 ], [ "▁Through", -10.909710884094238 ], [ "▁kick", -10.909798622131348 ], [ "▁perioada", -10.90993881225586 ], [ "▁entirely", -10.909953117370605 ], [ "▁impossible", -10.91015911102295 ], [ "▁consideration", -10.910268783569336 ], [ "▁Alt", -10.91054916381836 ], [ "▁Come", -10.911089897155762 ], [ "▁outstanding", -10.911276817321777 ], [ "83", -10.911727905273438 ], [ 
"▁prezent", -10.911859512329102 ], [ "▁Local", -10.911993980407715 ], [ "▁Camp", -10.912056922912598 ], [ "▁bear", -10.912067413330078 ], [ "enden", -10.912262916564941 ], [ "life", -10.91236686706543 ], [ "▁Haus", -10.912516593933105 ], [ "▁William", -10.912644386291504 ], [ "“,", -10.912665367126465 ], [ "▁Instagram", -10.91285514831543 ], [ "▁solve", -10.913195610046387 ], [ "▁Ze", -10.913431167602539 ], [ "▁everyday", -10.91357135772705 ], [ "bla", -10.913615226745605 ], [ "eng", -10.913662910461426 ], [ "ough", -10.914246559143066 ], [ "84", -10.914483070373535 ], [ "?\"", -10.914599418640137 ], [ "rely", -10.91476821899414 ], [ "TH", -10.914841651916504 ], [ "lang", -10.91511058807373 ], [ "82", -10.915817260742188 ], [ "▁removal", -10.91589641571045 ], [ "ală", -10.915956497192383 ], [ "▁circumstances", -10.916097640991211 ], [ "ente", -10.91622257232666 ], [ "▁lieu", -10.91645336151123 ], [ "▁2016.", -10.91710376739502 ], [ "▁ales", -10.917342185974121 ], [ "▁pure", -10.917482376098633 ], [ "▁choosing", -10.917590141296387 ], [ "▁Russia", -10.917698860168457 ], [ "amp", -10.917703628540039 ], [ "▁Santa", -10.91788387298584 ], [ "▁happening", -10.918203353881836 ], [ "▁crew", -10.91822338104248 ], [ "▁lei", -10.91855239868164 ], [ "IP", -10.91858196258545 ], [ "RO", -10.919425964355469 ], [ "▁resort", -10.919514656066895 ], [ "ened", -10.919689178466797 ], [ "MB", -10.920031547546387 ], [ "▁styles", -10.920052528381348 ], [ "▁dernier", -10.920533180236816 ], [ "uck", -10.920699119567871 ], [ "▁Guide", -10.920710563659668 ], [ "fic", -10.92096996307373 ], [ "▁fitness", -10.921977996826172 ], [ "▁healthcare", -10.92223072052002 ], [ "mol", -10.92237663269043 ], [ "▁vis", -10.922721862792969 ], [ "▁atmosphere", -10.922972679138184 ], [ "▁motion", -10.922989845275879 ], [ "▁closer", -10.923114776611328 ], [ "▁SA", -10.92335319519043 ], [ "▁default", -10.923371315002441 ], [ "▁architecture", -10.923471450805664 ], [ "iile", -10.923528671264648 ], [ "zel", -10.923675537109375 ], [ "cla", -10.92387866973877 ], [ "OP", -10.924382209777832 ], [ "▁west", -10.924965858459473 ], [ "▁Energy", -10.925613403320312 ], [ "▁positions", -10.925777435302734 ], [ "▁contrast", -10.925885200500488 ], [ "▁serves", -10.92605972290039 ], [ "cup", -10.926340103149414 ], [ "▁rose", -10.926485061645508 ], [ "pers", -10.92664623260498 ], [ "▁noise", -10.926846504211426 ], [ "mont", -10.92690658569336 ], [ " -10.927061080932617 ], [ "lies", -10.927326202392578 ], [ "pat", -10.927718162536621 ], [ "IC", -10.927956581115723 ], [ "arc", -10.927989959716797 ], [ "▁winner", -10.928524017333984 ], [ "tent", -10.928732872009277 ], [ "▁Preis", -10.929106712341309 ], [ "▁vin", -10.929254531860352 ], [ "blo", -10.92929458618164 ], [ "ție", -10.929520606994629 ], [ "▁OR", -10.930315017700195 ], [ "▁Buch", -10.930798530578613 ], [ "▁nearby", -10.931190490722656 ], [ "▁meetings", -10.931290626525879 ], [ "▁48", -10.931465148925781 ], [ "▁quand", -10.93152904510498 ], [ "▁usual", -10.931936264038086 ], [ "▁weitere", -10.932539939880371 ], [ "▁caught", -10.932571411132812 ], [ "▁issued", -10.932626724243164 ], [ "ști", -10.932896614074707 ], [ "upcoming", -10.933232307434082 ], [ "▁agreed", -10.933233261108398 ], [ "place", -10.933353424072266 ], [ "▁Brand", -10.93344497680664 ], [ "▁relation", -10.933969497680664 ], [ "▁atât", -10.934090614318848 ], [ "▁Tre", -10.934176445007324 ], [ "▁lors", -10.934438705444336 ], [ "▁adopt", -10.934452056884766 ], [ "▁celui", -10.93458366394043 ], [ "cken", -10.93505859375 ], [ 
"▁partnership", -10.935284614562988 ], [ "?”", -10.935376167297363 ], [ "▁ba", -10.935746192932129 ], [ "▁ID", -10.935832023620605 ], [ "▁consistent", -10.935835838317871 ], [ "▁Ya", -10.935941696166992 ], [ "▁Academy", -10.936182022094727 ], [ "cial", -10.936230659484863 ], [ "1%", -10.936366081237793 ], [ "▁mise", -10.936684608459473 ], [ "▁gute", -10.936728477478027 ], [ "gli", -10.936939239501953 ], [ "▁Bu", -10.937679290771484 ], [ "▁reduction", -10.937917709350586 ], [ "acy", -10.938126564025879 ], [ "aga", -10.938161849975586 ], [ "▁Sc", -10.938273429870605 ], [ "▁Informationen", -10.938308715820312 ], [ "▁kommen", -10.938352584838867 ], [ "press", -10.93837833404541 ], [ "▁bridge", -10.938379287719727 ], [ "▁qualified", -10.938671112060547 ], [ "position", -10.938821792602539 ], [ "▁combat", -10.938933372497559 ], [ "!\"", -10.938993453979492 ], [ "eva", -10.939217567443848 ], [ "oase", -10.939380645751953 ], [ "▁inner", -10.939410209655762 ], [ "▁loans", -10.939720153808594 ], [ "made", -10.939786911010742 ], [ "▁Mexico", -10.93993091583252 ], [ "▁formal", -10.940092086791992 ], [ "▁fell", -10.94021987915039 ], [ "91", -10.940524101257324 ], [ "▁campus", -10.9407320022583 ], [ "ienne", -10.940869331359863 ], [ "▁framework", -10.94105339050293 ], [ "ncing", -10.941157341003418 ], [ "▁Para", -10.941222190856934 ], [ "▁password", -10.941298484802246 ], [ "▁sei", -10.941422462463379 ], [ "▁Cross", -10.941532135009766 ], [ "▁Ten", -10.941873550415039 ], [ "bank", -10.941887855529785 ], [ "▁gun", -10.942000389099121 ], [ "ient", -10.942021369934082 ], [ "▁usage", -10.942176818847656 ], [ "▁(2", -10.942278861999512 ], [ "Gra", -10.942320823669434 ], [ "▁prea", -10.94253158569336 ], [ "▁Als", -10.942619323730469 ], [ "▁finance", -10.942638397216797 ], [ "tate", -10.942665100097656 ], [ "ition", -10.942703247070312 ], [ "▁regulations", -10.942741394042969 ], [ "▁Professional", -10.943001747131348 ], [ "▁pl", -10.94336986541748 ], [ "▁SEO", -10.943472862243652 ], [ "▁trecut", -10.943487167358398 ], [ "▁aller", -10.943509101867676 ], [ "▁violence", -10.943986892700195 ], [ "▁membership", -10.944117546081543 ], [ "▁picked", -10.944162368774414 ], [ "▁collected", -10.9443359375 ], [ "▁extended", -10.944449424743652 ], [ "▁religious", -10.944661140441895 ], [ "▁salle", -10.944767951965332 ], [ "RA", -10.944781303405762 ], [ "▁blend", -10.945232391357422 ], [ "▁Min", -10.94532299041748 ], [ "kal", -10.945887565612793 ], [ "▁featuring", -10.945902824401855 ], [ "▁researchers", -10.946263313293457 ], [ "▁Search", -10.946558952331543 ], [ "CE", -10.946675300598145 ], [ "▁recognized", -10.94682502746582 ], [ "▁semi", -10.94692611694336 ], [ "▁exposure", -10.94718074798584 ], [ "grew", -10.947466850280762 ], [ "▁candidate", -10.948250770568848 ], [ "▁shares", -10.948908805847168 ], [ "▁edit", -10.949745178222656 ], [ "CS", -10.949905395507812 ], [ "▁Cl", -10.950240135192871 ], [ "▁Enjoy", -10.951438903808594 ], [ "▁hurt", -10.951482772827148 ], [ "▁bottle", -10.951593399047852 ], [ "▁Buy", -10.95159912109375 ], [ "▁superior", -10.952286720275879 ], [ "▁missed", -10.952424049377441 ], [ "▁workshop", -10.952433586120605 ], [ "action", -10.952437400817871 ], [ "ple", -10.952699661254883 ], [ "▁Schul", -10.952814102172852 ], [ "▁houses", -10.953080177307129 ], [ "▁2017,", -10.953569412231445 ], [ "▁killed", -10.953750610351562 ], [ "▁calendar", -10.954306602478027 ], [ "▁Mike", -10.954597473144531 ], [ "FA", -10.954627990722656 ], [ "nut", -10.95487117767334 ], [ "▁establish", -10.955140113830566 ], [ 
"▁alcohol", -10.95514965057373 ], [ "▁closely", -10.955170631408691 ], [ "▁MA", -10.955381393432617 ], [ "pul", -10.955389022827148 ], [ "▁defined", -10.955666542053223 ], [ "aires", -10.955692291259766 ], [ "▁Shi", -10.955703735351562 ], [ "▁plays", -10.956303596496582 ], [ "▁sister", -10.95690631866455 ], [ "▁cable", -10.957179069519043 ], [ "▁desk", -10.957215309143066 ], [ "▁apoi", -10.957738876342773 ], [ "▁identity", -10.95785140991211 ], [ "▁stars", -10.957931518554688 ], [ "▁fata", -10.958008766174316 ], [ "▁obvious", -10.958330154418945 ], [ "▁dental", -10.95843505859375 ], [ "AM", -10.958802223205566 ], [ "▁sharp", -10.95881175994873 ], [ "duc", -10.959053993225098 ], [ "▁manufacturer", -10.95914077758789 ], [ "!)", -10.959270477294922 ], [ "▁objects", -10.959720611572266 ], [ "▁Ag", -10.959989547729492 ], [ "referred", -10.960195541381836 ], [ "▁Ak", -10.960308074951172 ], [ "burg", -10.960360527038574 ], [ "▁nouveau", -10.960854530334473 ], [ "▁Pal", -10.960994720458984 ], [ "▁Arbeits", -10.961280822753906 ], [ "▁personally", -10.961288452148438 ], [ "▁Dé", -10.961292266845703 ], [ "▁import", -10.961688041687012 ], [ "▁justice", -10.961913108825684 ], [ "▁photography", -10.962705612182617 ], [ "▁portfolio", -10.962841987609863 ], [ "56", -10.96314525604248 ], [ "▁nouvelle", -10.963293075561523 ], [ "▁oven", -10.964197158813477 ], [ "▁400", -10.964272499084473 ], [ "▁mixed", -10.964395523071289 ], [ "▁relax", -10.964427947998047 ], [ "▁imp", -10.964703559875488 ], [ "▁».", -10.964734077453613 ], [ "▁mail", -10.964777946472168 ], [ "rage", -10.964861869812012 ], [ "nos", -10.964974403381348 ], [ "▁drugs", -10.965195655822754 ], [ "▁jede", -10.965211868286133 ], [ "▁einige", -10.965232849121094 ], [ "▁8.", -10.965325355529785 ], [ "ters", -10.965412139892578 ], [ "▁electrical", -10.965432167053223 ], [ "▁puis", -10.965836524963379 ], [ "▁films", -10.965903282165527 ], [ "41", -10.966036796569824 ], [ "▁moral", -10.966398239135742 ], [ "lage", -10.966402053833008 ], [ "▁spaces", -10.966415405273438 ], [ "▁Ed", -10.966462135314941 ], [ "▁classroom", -10.966588020324707 ], [ "▁große", -10.966588973999023 ], [ "▁baza", -10.966887474060059 ], [ "face", -10.967308044433594 ], [ "▁informed", -10.967333793640137 ], [ "▁improving", -10.967477798461914 ], [ "▁guidance", -10.967880249023438 ], [ "▁gallery", -10.96800708770752 ], [ "cular", -10.968046188354492 ], [ "53", -10.968094825744629 ], [ "Despite", -10.968238830566406 ], [ "▁forme", -10.968304634094238 ], [ "▁système", -10.968415260314941 ], [ "▁Win", -10.968494415283203 ], [ "▁Small", -10.968537330627441 ], [ "▁Mobile", -10.968564987182617 ], [ "▁tape", -10.968606948852539 ], [ "▁erhalten", -10.968914985656738 ], [ "▁movies", -10.968928337097168 ], [ "▁Unfortunately", -10.968963623046875 ], [ "▁Looking", -10.96945858001709 ], [ "▁guard", -10.969584465026855 ], [ "▁pr", -10.969820976257324 ], [ "▁confident", -10.96988582611084 ], [ "BA", -10.970229148864746 ], [ "bas", -10.970272064208984 ], [ "hum", -10.97050666809082 ], [ "ular", -10.9705171585083 ], [ "▁Still", -10.970593452453613 ], [ "▁flavor", -10.970656394958496 ], [ "▁boost", -10.970773696899414 ], [ "▁division", -10.970842361450195 ], [ "ising", -10.971006393432617 ], [ "▁monitoring", -10.971044540405273 ], [ "▁Sen", -10.97105884552002 ], [ "▁https", -10.971527099609375 ], [ "mainly", -10.971735000610352 ], [ "play", -10.972251892089844 ], [ "▁dynamic", -10.972357749938965 ], [ "▁coup", -10.972370147705078 ], [ "▁carpet", -10.972561836242676 ], [ "iner", -10.972846984863281 
], [ "ral", -10.97325611114502 ], [ "iser", -10.973320007324219 ], [ "RC", -10.9739990234375 ], [ "▁definition", -10.97475814819336 ], [ "▁Za", -10.974767684936523 ], [ "friendly", -10.974883079528809 ], [ "43", -10.975123405456543 ], [ "link", -10.975180625915527 ], [ "▁Multi", -10.97519302368164 ], [ "▁einmal", -10.975272178649902 ], [ "▁stopped", -10.975394248962402 ], [ "vel", -10.975456237792969 ], [ "▁ongoing", -10.975565910339355 ], [ "▁ancient", -10.976259231567383 ], [ "take", -10.976301193237305 ], [ "cia", -10.976432800292969 ], [ "▁USB", -10.976545333862305 ], [ "▁attorney", -10.976866722106934 ], [ "▁slot", -10.976866722106934 ], [ "▁Line", -10.97693157196045 ], [ "rice", -10.977087020874023 ], [ "ify", -10.977520942687988 ], [ "ó", -10.978260040283203 ], [ "▁flash", -10.978483200073242 ], [ "▁extension", -10.978555679321289 ], [ "▁Ende", -10.979022979736328 ], [ "▁powder", -10.979114532470703 ], [ "ească", -10.979143142700195 ], [ "03", -10.979327201843262 ], [ "▁normally", -10.979416847229004 ], [ "▁pun", -10.980108261108398 ], [ "viewed", -10.980138778686523 ], [ "ssen", -10.980896949768066 ], [ "ache", -10.981121063232422 ], [ "ește", -10.98122787475586 ], [ "▁PA", -10.981266021728516 ], [ "FI", -10.981945991516113 ], [ "▁Frank", -10.98198127746582 ], [ "▁apa", -10.98242473602295 ], [ "▁coast", -10.982614517211914 ], [ "▁boy", -10.982665061950684 ], [ "lim", -10.982902526855469 ], [ "▁putin", -10.983194351196289 ], [ "▁script", -10.983332633972168 ], [ "▁noticed", -10.9837007522583 ], [ "▁dealing", -10.983922004699707 ], [ "▁Trans", -10.984100341796875 ], [ "▁border", -10.984447479248047 ], [ "▁reputation", -10.984657287597656 ], [ "-2", -10.984662055969238 ], [ "HS", -10.984707832336426 ], [ "▁supports", -10.984724998474121 ], [ "▁horse", -10.985146522521973 ], [ "nik", -10.98520565032959 ], [ "▁clothes", -10.985234260559082 ], [ "▁Card", -10.985612869262695 ], [ "▁relief", -10.98595905303955 ], [ "▁Visit", -10.986259460449219 ], [ "▁luni", -10.986593246459961 ], [ "81", -10.986693382263184 ], [ "qua", -10.986945152282715 ], [ "▁Comp", -10.98697280883789 ], [ "▁investigation", -10.987137794494629 ], [ "▁depth", -10.987598419189453 ], [ "▁earned", -10.987709045410156 ], [ "▁Ren", -10.988090515136719 ], [ "▁Dumnezeu", -10.988107681274414 ], [ "▁Joe", -10.988210678100586 ], [ "▁goods", -10.988288879394531 ], [ "▁Vol", -10.988686561584473 ], [ "▁certified", -10.989118576049805 ], [ "▁favor", -10.989326477050781 ], [ "▁Scott", -10.989599227905273 ], [ "▁protest", -10.989802360534668 ], [ "▁pace", -10.989803314208984 ], [ "▁Angeles", -10.990368843078613 ], [ "inch", -10.99050521850586 ], [ "▁charged", -10.99052619934082 ], [ "code", -10.990968704223633 ], [ "▁convenient", -10.99138355255127 ], [ "▁Nord", -10.991556167602539 ], [ "▁yesterday", -10.991691589355469 ], [ "Dacă", -10.99169635772705 ], [ "▁Travel", -10.991786003112793 ], [ "▁kid", -10.991941452026367 ], [ "ction", -10.991986274719238 ], [ "▁groupe", -10.992770195007324 ], [ "pu", -10.993056297302246 ], [ "bzw", -10.993196487426758 ], [ "▁mixture", -10.993513107299805 ], [ "▁Farm", -10.993715286254883 ], [ "▁acces", -10.993939399719238 ], [ "matic", -10.993950843811035 ], [ "▁comparison", -10.994006156921387 ], [ "reich", -10.994095802307129 ], [ "pet", -10.994502067565918 ], [ "▁lit", -10.994685173034668 ], [ "▁organized", -10.99476432800293 ], [ "just", -10.995564460754395 ], [ "▁fellow", -10.996004104614258 ], [ "Ver", -10.996209144592285 ], [ "▁trends", -10.99622631072998 ], [ "▁evaluation", -10.99626636505127 ], 
[ "feld", -10.99639892578125 ], [ "▁Pu", -10.99671459197998 ], [ "▁equipped", -10.99727725982666 ], [ "▁catre", -10.997278213500977 ], [ "eck", -10.997369766235352 ], [ "▁facing", -10.997998237609863 ], [ "▁instrument", -10.998361587524414 ], [ "▁pleased", -10.998507499694824 ], [ "▁tap", -10.998818397521973 ], [ "dom", -10.998826026916504 ], [ "▁pump", -10.999384880065918 ], [ "▁functional", -10.999429702758789 ], [ "▁authority", -10.999455451965332 ], [ "▁experiment", -10.999478340148926 ], [ "LO", -10.999529838562012 ], [ "▁scheduled", -10.999552726745605 ], [ "halt", -10.999604225158691 ], [ "▁ceiling", -10.999761581420898 ], [ "▁Step", -11.000310897827148 ], [ "▁orders", -11.00032901763916 ], [ "▁speech", -11.001046180725098 ], [ "▁stands", -11.001119613647461 ], [ "▁disc", -11.001920700073242 ], [ "▁rec", -11.001935958862305 ], [ "▁Text", -11.00243854522705 ], [ "▁banks", -11.00294017791748 ], [ "▁oameni", -11.003045082092285 ], [ "▁communications", -11.003194808959961 ], [ "trag", -11.003307342529297 ], [ "▁trail", -11.003803253173828 ], [ "AN", -11.00426197052002 ], [ "▁Federal", -11.004467964172363 ], [ "▁quote", -11.00455093383789 ], [ "▁spus", -11.004620552062988 ], [ "▁managing", -11.004990577697754 ], [ "▁booking", -11.00505256652832 ], [ "▁Blog", -11.005669593811035 ], [ "▁tank", -11.005681991577148 ], [ "pon", -11.005804061889648 ], [ "GE", -11.00582218170166 ], [ "▁fiscal", -11.005871772766113 ], [ "▁satisfaction", -11.006044387817383 ], [ "cre", -11.00614070892334 ], [ "▁protected", -11.006494522094727 ], [ "▁enfants", -11.006782531738281 ], [ "▁dort", -11.007554054260254 ], [ "▁Mel", -11.008041381835938 ], [ "▁turns", -11.00804615020752 ], [ "▁savings", -11.008106231689453 ], [ "▁voir", -11.008358001708984 ], [ "▁Boston", -11.008394241333008 ], [ "▁debate", -11.008469581604004 ], [ "▁SO", -11.008857727050781 ], [ "▁tables", -11.009193420410156 ], [ "▁honest", -11.009210586547852 ], [ "mate", -11.009283065795898 ], [ "▁chart", -11.0094633102417 ], [ "decât", -11.009682655334473 ], [ "▁Radio", -11.009685516357422 ], [ "54", -11.00986385345459 ], [ "▁vol", -11.010008811950684 ], [ "last", -11.010148048400879 ], [ "▁tall", -11.010408401489258 ], [ "▁Should", -11.010489463806152 ], [ "▁sink", -11.010525703430176 ], [ "▁Right", -11.010527610778809 ], [ "▁male", -11.010720252990723 ], [ "▁Modern", -11.010753631591797 ], [ "▁indeed", -11.010886192321777 ], [ "▁Garden", -11.011139869689941 ], [ "▁Mod", -11.011307716369629 ], [ "▁turning", -11.0115327835083 ], [ "▁inches", -11.011557579040527 ], [ "▁Police", -11.01183795928955 ], [ "▁Pay", -11.012016296386719 ], [ "UE", -11.0126371383667 ], [ "mé", -11.012652397155762 ], [ "EE", -11.013046264648438 ], [ "▁cookies", -11.013116836547852 ], [ "rip", -11.013351440429688 ], [ "▁Motor", -11.01352310180664 ], [ "▁lung", -11.01379680633545 ], [ "▁Ap", -11.013995170593262 ], [ "▁sustainable", -11.014066696166992 ], [ "▁instant", -11.014240264892578 ], [ "▁Rose", -11.014464378356934 ], [ "▁Carolina", -11.014906883239746 ], [ "▁Help", -11.014969825744629 ], [ "IE", -11.01535701751709 ], [ "▁Jersey", -11.015522956848145 ], [ "▁Spanish", -11.015586853027344 ], [ "▁wheel", -11.015660285949707 ], [ "▁fishing", -11.0158109664917 ], [ "gram", -11.015937805175781 ], [ "▁ST", -11.016227722167969 ], [ "▁Nov", -11.01632022857666 ], [ "▁reporting", -11.016362190246582 ], [ "ked", -11.016467094421387 ], [ "▁Leben", -11.016557693481445 ], [ "▁organisation", -11.016843795776367 ], [ "▁tiny", -11.017144203186035 ], [ "▁Alex", -11.017236709594727 ], [ 
"▁obtained", -11.017255783081055 ], [ "▁Acest", -11.017367362976074 ], [ "▁dangerous", -11.01749038696289 ], [ "utter", -11.017624855041504 ], [ "▁rev", -11.01801586151123 ], [ "Un", -11.018242835998535 ], [ "▁revealed", -11.018356323242188 ], [ "▁decade", -11.018709182739258 ], [ "▁possibility", -11.01945686340332 ], [ "service", -11.019577980041504 ], [ "è", -11.01966667175293 ], [ "▁Chief", -11.019674301147461 ], [ "▁Durch", -11.019795417785645 ], [ "▁cadre", -11.019843101501465 ], [ "▁wearing", -11.019845008850098 ], [ "sized", -11.01988410949707 ], [ "LY", -11.01989459991455 ], [ "▁unser", -11.019963264465332 ], [ "▁2016,", -11.019988059997559 ], [ "▁fail", -11.020028114318848 ], [ "iques", -11.020115852355957 ], [ "▁Angel", -11.020315170288086 ], [ "▁transportation", -11.020364761352539 ], [ "▁dates", -11.020395278930664 ], [ "▁danger", -11.020731925964355 ], [ "▁forum", -11.020828247070312 ], [ "zug", -11.020885467529297 ], [ "▁filed", -11.021199226379395 ], [ "loc", -11.021201133728027 ], [ "éri", -11.021234512329102 ], [ "tribu", -11.021393775939941 ], [ "▁entered", -11.021639823913574 ], [ "▁porte", -11.021928787231445 ], [ "▁arts", -11.021979331970215 ], [ "▁reform", -11.022001266479492 ], [ "▁Main", -11.022101402282715 ], [ "▁dir", -11.022111892700195 ], [ "▁approval", -11.022465705871582 ], [ "▁juice", -11.022750854492188 ], [ "vier", -11.022771835327148 ], [ "▁nivel", -11.02318000793457 ], [ "▁returns", -11.023423194885254 ], [ "▁formed", -11.023723602294922 ], [ "▁combine", -11.02436351776123 ], [ "▁cours", -11.024392127990723 ], [ "▁Standard", -11.024463653564453 ], [ "▁certification", -11.024677276611328 ], [ "escu", -11.024996757507324 ], [ "▁achieved", -11.025278091430664 ], [ "▁Model", -11.025280952453613 ], [ "rul", -11.025404930114746 ], [ "▁Tage", -11.025530815124512 ], [ "▁injuries", -11.02560806274414 ], [ "▁Sal", -11.025671005249023 ], [ "▁expenses", -11.025887489318848 ], [ "▁cet", -11.026009559631348 ], [ "▁taxes", -11.026028633117676 ], [ "diesen", -11.02626895904541 ], [ "▁fairly", -11.026638984680176 ], [ "▁Access", -11.026866912841797 ], [ "wind", -11.027122497558594 ], [ "IM", -11.027252197265625 ], [ "ense", -11.027548789978027 ], [ "▁hang", -11.027957916259766 ], [ "▁citizens", -11.028020858764648 ], [ "3%", -11.028101921081543 ], [ "lum", -11.028268814086914 ], [ "▁discussed", -11.028326034545898 ], [ "AC", -11.02841854095459 ], [ "‘", -11.0286865234375 ], [ "▁Sol", -11.028698921203613 ], [ "06", -11.028816223144531 ], [ "stellen", -11.029170989990234 ], [ "▁participation", -11.02917194366455 ], [ "▁Box", -11.029200553894043 ], [ "▁bieten", -11.029687881469727 ], [ "▁Louis", -11.029730796813965 ], [ "▁lessons", -11.029789924621582 ], [ "▁visible", -11.029966354370117 ], [ "▁Cam", -11.030128479003906 ], [ "▁Ban", -11.03053092956543 ], [ "▁Far", -11.03060245513916 ], [ "▁travers", -11.030759811401367 ], [ "▁telling", -11.030808448791504 ], [ "▁magic", -11.030855178833008 ], [ "▁Night", -11.031316757202148 ], [ "▁judge", -11.031400680541992 ], [ "▁Pat", -11.031482696533203 ], [ "▁Southern", -11.031901359558105 ], [ "OL", -11.031929969787598 ], [ "fully", -11.032191276550293 ], [ "▁acestea", -11.03223705291748 ], [ "▁Order", -11.032383918762207 ], [ "▁facut", -11.032523155212402 ], [ "▁Matt", -11.032600402832031 ], [ "registr", -11.03278923034668 ], [ "▁Yet", -11.032811164855957 ], [ "ß", -11.033596992492676 ], [ "▁făcut", -11.033618927001953 ], [ "▁versions", -11.033780097961426 ], [ "▁Force", -11.03396224975586 ], [ "rick", -11.034153938293457 ], [ 
"▁rund", -11.034563064575195 ], [ "ike", -11.034658432006836 ], [ "▁Young", -11.034675598144531 ], [ "▁ski", -11.034927368164062 ], [ "CU", -11.035385131835938 ], [ "▁Second", -11.035510063171387 ], [ "▁graduate", -11.03554916381836 ], [ "▁Bible", -11.036049842834473 ], [ "▁vary", -11.036060333251953 ], [ "▁celebration", -11.036151885986328 ], [ "▁risks", -11.036210060119629 ], [ "erii", -11.036327362060547 ], [ "rance", -11.036577224731445 ], [ "▁MP", -11.036787986755371 ], [ "▁tale", -11.036788940429688 ], [ "▁Ford", -11.037044525146484 ], [ "▁attached", -11.037278175354004 ], [ "▁Sy", -11.037312507629395 ], [ "▁Ly", -11.03765869140625 ], [ "stellung", -11.037687301635742 ], [ "▁trop", -11.0377197265625 ], [ "▁années", -11.037736892700195 ], [ "▁linked", -11.03792667388916 ], [ "pit", -11.038352012634277 ], [ "So", -11.03835391998291 ], [ "ţe", -11.038473129272461 ], [ "▁origin", -11.038509368896484 ], [ "▁boys", -11.039263725280762 ], [ "holder", -11.039352416992188 ], [ "read", -11.039461135864258 ], [ "▁relative", -11.03950023651123 ], [ "▁industries", -11.03958511352539 ], [ "making", -11.039688110351562 ], [ "▁tun", -11.039917945861816 ], [ "▁forced", -11.041061401367188 ], [ "▁Welcome", -11.041086196899414 ], [ "▁explained", -11.041138648986816 ], [ "MP", -11.041389465332031 ], [ "▁Three", -11.041613578796387 ], [ "aza", -11.041768074035645 ], [ "▁1999", -11.041924476623535 ], [ "▁erst", -11.042237281799316 ], [ "RS", -11.042623519897461 ], [ "▁attractive", -11.04279899597168 ], [ "▁visited", -11.042805671691895 ], [ "▁nom", -11.042874336242676 ], [ "▁drum", -11.042933464050293 ], [ "cast", -11.043068885803223 ], [ "ogen", -11.043105125427246 ], [ "▁tech", -11.04360294342041 ], [ "▁Comment", -11.043664932250977 ], [ "▁Little", -11.04405689239502 ], [ "▁suggested", -11.044086456298828 ], [ "▁gar", -11.044205665588379 ], [ "▁crack", -11.04458999633789 ], [ "▁shooting", -11.044676780700684 ], [ "▁Try", -11.044759750366211 ], [ "▁Remember", -11.045008659362793 ], [ "▁folks", -11.045217514038086 ], [ "▁MS", -11.045512199401855 ], [ "▁Dia", -11.04584789276123 ], [ "3)", -11.046561241149902 ], [ "arbeit", -11.04697036743164 ], [ "▁pepper", -11.047065734863281 ], [ "zz", -11.047107696533203 ], [ "▁extreme", -11.047235488891602 ], [ "▁extrem", -11.047367095947266 ], [ "▁severe", -11.047768592834473 ], [ "▁networks", -11.047882080078125 ], [ "păr", -11.047910690307617 ], [ "sent", -11.047933578491211 ], [ "▁structures", -11.048048973083496 ], [ "▁Join", -11.048078536987305 ], [ "▁privind", -11.048255920410156 ], [ "▁marriage", -11.04865837097168 ], [ "▁liegt", -11.048918724060059 ], [ "eben", -11.048995971679688 ], [ "▁produse", -11.049076080322266 ], [ "▁tested", -11.049090385437012 ], [ "▁Queen", -11.049134254455566 ], [ "▁Tax", -11.049687385559082 ], [ "rian", -11.049710273742676 ], [ "▁Problem", -11.050151824951172 ], [ "izat", -11.05023193359375 ], [ "udi", -11.050324440002441 ], [ "▁LA", -11.050718307495117 ], [ "▁afford", -11.051108360290527 ], [ "▁percentage", -11.05121898651123 ], [ "▁cute", -11.051547050476074 ], [ "▁gorgeous", -11.051891326904297 ], [ "▁indoor", -11.05190372467041 ], [ "▁configuration", -11.052103042602539 ], [ "▁immediate", -11.052303314208984 ], [ "▁exemple", -11.052450180053711 ], [ "▁Being", -11.052550315856934 ], [ "▁introduction", -11.052591323852539 ], [ "ella", -11.053206443786621 ], [ "bare", -11.053521156311035 ], [ "▁besser", -11.053539276123047 ], [ "▁Put", -11.053740501403809 ], [ "gon", -11.054248809814453 ], [ "▁Italy", -11.054259300231934 ], [ 
"▁Thus", -11.05435562133789 ], [ "tari", -11.054437637329102 ], [ "0.000", -11.054460525512695 ], [ "▁Price", -11.054651260375977 ], [ "▁Trust", -11.054824829101562 ], [ "▁contra", -11.054863929748535 ], [ "▁layout", -11.05504035949707 ], [ "▁Ireland", -11.055187225341797 ], [ "ctor", -11.055344581604004 ], [ "atoare", -11.055540084838867 ], [ "pra", -11.055729866027832 ], [ "rent", -11.055892944335938 ], [ "▁Seite", -11.05605411529541 ], [ "▁ori", -11.056280136108398 ], [ "spiel", -11.056541442871094 ], [ "▁Times", -11.056883811950684 ], [ "primarily", -11.056974411010742 ], [ "nov", -11.05703067779541 ], [ "▁desired", -11.057061195373535 ], [ "▁Would", -11.057072639465332 ], [ "PL", -11.057225227355957 ], [ "▁originally", -11.057367324829102 ], [ "▁Ana", -11.057463645935059 ], [ "EN", -11.05754566192627 ], [ "▁occasion", -11.05755615234375 ], [ "▁grant", -11.057572364807129 ], [ "igkeit", -11.057975769042969 ], [ "▁scheme", -11.058146476745605 ], [ "▁2015.", -11.058621406555176 ], [ "izare", -11.058778762817383 ], [ "gate", -11.058792114257812 ], [ "▁poker", -11.058899879455566 ], [ "pping", -11.058998107910156 ], [ "▁Wild", -11.059511184692383 ], [ "▁YouTube", -11.059995651245117 ], [ "▁assume", -11.060284614562988 ], [ "с", -11.060614585876465 ], [ "▁rapport", -11.060623168945312 ], [ "▁labor", -11.060996055603027 ], [ "teur", -11.061041831970215 ], [ "▁genre", -11.06116008758545 ], [ "▁plat", -11.061745643615723 ], [ "▁listening", -11.061750411987305 ], [ "sky", -11.061777114868164 ], [ "▁neighborhood", -11.061782836914062 ], [ "▁3-", -11.062150001525879 ], [ "▁Library", -11.062162399291992 ], [ "agit", -11.062249183654785 ], [ "▁platforms", -11.062849998474121 ], [ "bei", -11.062882423400879 ], [ "AB", -11.062897682189941 ], [ "▁manufacturers", -11.06295394897461 ], [ "▁printing", -11.063141822814941 ], [ "▁crisis", -11.063326835632324 ], [ "▁Smart", -11.06335163116455 ], [ "▁drawing", -11.063406944274902 ], [ "MO", -11.06348991394043 ], [ "▁durable", -11.063569068908691 ], [ "chant", -11.0636625289917 ], [ "▁chemical", -11.063764572143555 ], [ "▁savoir", -11.063776016235352 ], [ "▁Max", -11.063802719116211 ], [ "gestellt", -11.06380844116211 ], [ "▁rural", -11.063854217529297 ], [ "52", -11.064105033874512 ], [ "▁invited", -11.064169883728027 ], [ "▁fil", -11.0642728805542 ], [ "▁Rob", -11.064284324645996 ], [ "▁Bell", -11.064387321472168 ], [ "▁neck", -11.064831733703613 ], [ "pac", -11.064879417419434 ], [ "wal", -11.06491470336914 ], [ "▁là", -11.064922332763672 ], [ "▁Virginia", -11.065081596374512 ], [ "▁applicable", -11.06509017944336 ], [ "▁abuse", -11.065153121948242 ], [ "aide", -11.065321922302246 ], [ "▁increases", -11.065396308898926 ], [ "▁moi", -11.065568923950195 ], [ "▁Non", -11.065577507019043 ], [ "▁Produkt", -11.065627098083496 ], [ "FC", -11.065644264221191 ], [ "▁shops", -11.065677642822266 ], [ "▁prendre", -11.065923690795898 ], [ "atul", -11.065990447998047 ], [ "▁sal", -11.066137313842773 ], [ "▁société", -11.06627082824707 ], [ "▁Hot", -11.066329002380371 ], [ "rim", -11.066587448120117 ], [ "gue", -11.06661605834961 ], [ "▁enterprise", -11.066624641418457 ], [ "▁33", -11.067329406738281 ], [ "mittel", -11.067395210266113 ], [ "ged", -11.067439079284668 ], [ "▁formula", -11.06777286529541 ], [ "▁spin", -11.067784309387207 ], [ "als", -11.067826271057129 ], [ "2%", -11.06785774230957 ], [ "bon", -11.068192481994629 ], [ "▁Executive", -11.068323135375977 ], [ "▁wirklich", -11.068427085876465 ], [ "îl", -11.068608283996582 ], [ "1.", -11.068917274475098 ], [ 
"▁Arm", -11.069157600402832 ], [ "▁rid", -11.069358825683594 ], [ "aries", -11.069727897644043 ], [ "▁incident", -11.06982421875 ], [ "▁copii", -11.070008277893066 ], [ "▁Charles", -11.070141792297363 ], [ "▁meals", -11.070147514343262 ], [ "▁wireless", -11.070237159729004 ], [ "Ex", -11.070364952087402 ], [ "▁Financial", -11.070540428161621 ], [ "▁AM", -11.070615768432617 ], [ "▁fest", -11.070645332336426 ], [ "▁Ol", -11.071410179138184 ], [ "oir", -11.071447372436523 ], [ "300", -11.071893692016602 ], [ "▁punct", -11.072138786315918 ], [ "▁Mad", -11.07283878326416 ], [ "▁Ali", -11.072907447814941 ], [ "lag", -11.073214530944824 ], [ "▁ocean", -11.073314666748047 ], [ "▁mirror", -11.073326110839844 ], [ "▁Additionally", -11.073869705200195 ], [ "alia", -11.073884963989258 ], [ "▁county", -11.073899269104004 ], [ "▁hip", -11.074305534362793 ], [ "dale", -11.074395179748535 ], [ "▁Stra", -11.074429512023926 ], [ "▁drag", -11.074575424194336 ], [ "▁Sand", -11.074851036071777 ], [ "▁historic", -11.074980735778809 ], [ "ière", -11.075427055358887 ], [ "▁examine", -11.075624465942383 ], [ "soci", -11.075634002685547 ], [ "ime", -11.076088905334473 ], [ "▁Insurance", -11.07621955871582 ], [ "▁crime", -11.076736450195312 ], [ "▁pare", -11.076945304870605 ], [ "▁craft", -11.077105522155762 ], [ "▁Building", -11.077279090881348 ], [ "mission", -11.077534675598145 ], [ "▁Americans", -11.077573776245117 ], [ "▁mg", -11.077799797058105 ], [ "▁passage", -11.077938079833984 ], [ "▁deposit", -11.078346252441406 ], [ "▁widely", -11.078444480895996 ], [ "nch", -11.078453063964844 ], [ "▁Coast", -11.078756332397461 ], [ "▁recipes", -11.078784942626953 ], [ "▁Ziel", -11.07951545715332 ], [ "▁duty", -11.079646110534668 ], [ "▁gerne", -11.079704284667969 ], [ "most", -11.080034255981445 ], [ "▁argument", -11.080158233642578 ], [ "▁root", -11.08021354675293 ], [ "▁consult", -11.08024787902832 ], [ "▁muscle", -11.080255508422852 ], [ "▁spoke", -11.08038330078125 ], [ "▁Cum", -11.080950736999512 ], [ "▁orange", -11.081033706665039 ], [ "▁reader", -11.081123352050781 ], [ "schw", -11.081151008605957 ], [ "▁commission", -11.081332206726074 ], [ "histoire", -11.081811904907227 ], [ "▁represents", -11.082064628601074 ], [ "▁meilleur", -11.082343101501465 ], [ "▁10.", -11.082358360290527 ], [ "HA", -11.082427024841309 ], [ "▁Systems", -11.082573890686035 ], [ "▁blind", -11.082603454589844 ], [ "▁HP", -11.083221435546875 ], [ "▁doi", -11.083307266235352 ], [ "▁signature", -11.083404541015625 ], [ "▁invite", -11.083505630493164 ], [ "▁Samsung", -11.083802223205566 ], [ "▁liber", -11.083942413330078 ], [ "▁letters", -11.0840482711792 ], [ "▁primul", -11.084186553955078 ], [ "▁losing", -11.084328651428223 ], [ "resulting", -11.084467887878418 ], [ "▁Computer", -11.08474063873291 ], [ "▁poll", -11.0847749710083 ], [ "rile", -11.085102081298828 ], [ "TI", -11.085142135620117 ], [ "▁cur", -11.08566951751709 ], [ "▁fonction", -11.085833549499512 ], [ "gat", -11.086359977722168 ], [ "AA", -11.086480140686035 ], [ "tiv", -11.086692810058594 ], [ "▁Str", -11.087076187133789 ], [ "ești", -11.087677955627441 ], [ "▁officer", -11.0877046585083 ], [ "reducing", -11.08772087097168 ], [ "▁gifts", -11.08780288696289 ], [ "▁performing", -11.08788776397705 ], [ "▁»,", -11.088349342346191 ], [ "▁guitar", -11.08838939666748 ], [ "▁segment", -11.088580131530762 ], [ "▁Tar", -11.08861255645752 ], [ "▁ultimately", -11.088805198669434 ], [ "▁cam", -11.088960647583008 ], [ "▁Arbeit", -11.089076042175293 ], [ "▁accessories", -11.089418411254883 
], [ "bad", -11.089820861816406 ], [ "home", -11.0899019241333 ], [ "▁clip", -11.08995532989502 ], [ "range", -11.090432167053223 ], [ "CM", -11.090867042541504 ], [ "▁printed", -11.090883255004883 ], [ "▁Pet", -11.091177940368652 ], [ "▁attract", -11.091333389282227 ], [ "date", -11.091501235961914 ], [ "▁Senior", -11.091503143310547 ], [ "▁genau", -11.092177391052246 ], [ "num", -11.092435836791992 ], [ "▁attended", -11.092674255371094 ], [ "▁Turn", -11.092824935913086 ], [ "▁History", -11.092830657958984 ], [ "some", -11.092852592468262 ], [ "▁describe", -11.09308910369873 ], [ "▁Lee", -11.093143463134766 ], [ "▁Fre", -11.093314170837402 ], [ "▁league", -11.093345642089844 ], [ "new", -11.093505859375 ], [ "tors", -11.093535423278809 ], [ "▁storm", -11.094005584716797 ], [ "▁Beispiel", -11.094197273254395 ], [ "▁index", -11.094344139099121 ], [ "▁awarded", -11.094613075256348 ], [ "state", -11.094625473022461 ], [ "▁1990", -11.094874382019043 ], [ "▁ends", -11.094902992248535 ], [ "kor", -11.095070838928223 ], [ "far", -11.095418930053711 ], [ "▁Page", -11.095541000366211 ], [ "▁promotion", -11.095610618591309 ], [ "▁weekly", -11.095726013183594 ], [ "400", -11.095966339111328 ], [ "iuni", -11.096365928649902 ], [ "▁Summer", -11.096376419067383 ], [ "▁thin", -11.096627235412598 ], [ "▁dafür", -11.09669303894043 ], [ "51", -11.096769332885742 ], [ "PR", -11.096978187561035 ], [ "▁Hy", -11.097001075744629 ], [ "gas", -11.097013473510742 ], [ "▁atat", -11.097166061401367 ], [ "▁mining", -11.097347259521484 ], [ "▁principles", -11.09741497039795 ], [ "gent", -11.097545623779297 ], [ "ika", -11.097685813903809 ], [ "▁religion", -11.097787857055664 ], [ "▁ordered", -11.098284721374512 ], [ "▁developers", -11.098298072814941 ], [ "▁pleasure", -11.098456382751465 ], [ "vit", -11.098505020141602 ], [ "mers", -11.0988130569458 ], [ "▁Section", -11.098873138427734 ], [ "▁por", -11.098960876464844 ], [ "▁Name", -11.099200248718262 ], [ "▁pink", -11.099260330200195 ], [ "dig", -11.09934139251709 ], [ "▁eligible", -11.099397659301758 ], [ "▁Happy", -11.09941577911377 ], [ "▁fo", -11.099480628967285 ], [ "▁availability", -11.099541664123535 ], [ "GO", -11.099583625793457 ], [ "▁Europa", -11.099637985229492 ], [ "▁Unit", -11.099656105041504 ], [ "▁1000", -11.099837303161621 ], [ "▁Berg", -11.099846839904785 ], [ "fini", -11.099853515625 ], [ "▁$3", -11.100565910339355 ], [ "iza", -11.100749969482422 ], [ "▁promo", -11.100830078125 ], [ "▁Low", -11.101234436035156 ], [ "abord", -11.101326942443848 ], [ "äh", -11.101485252380371 ], [ "▁Professor", -11.101570129394531 ], [ "▁array", -11.101579666137695 ], [ "▁hate", -11.101594924926758 ], [ "▁recording", -11.101601600646973 ], [ "RI", -11.101649284362793 ], [ "▁proof", -11.101710319519043 ], [ "lay", -11.10185718536377 ], [ "DE", -11.102007865905762 ], [ "▁surprised", -11.102066040039062 ], [ "▁boxes", -11.102193832397461 ], [ "▁noastre", -11.102386474609375 ], [ "zie", -11.102387428283691 ], [ "▁însă", -11.10254192352295 ], [ "▁ajuta", -11.102783203125 ], [ "▁weil", -11.1028413772583 ], [ "▁whenever", -11.103026390075684 ], [ "shi", -11.103194236755371 ], [ "satz", -11.103605270385742 ], [ "▁remind", -11.10401725769043 ], [ "▁consist", -11.10412311553955 ], [ "▁motiv", -11.104240417480469 ], [ "▁PS", -11.1043062210083 ], [ "▁trois", -11.104543685913086 ], [ "pad", -11.10477352142334 ], [ "▁besten", -11.104904174804688 ], [ "▁Stone", -11.105140686035156 ], [ "itz", -11.105157852172852 ], [ "fit", -11.105164527893066 ], [ "▁Mountain", -11.105178833007812 
], [ "OC", -11.10519027709961 ], [ "▁depends", -11.105228424072266 ], [ "▁Cover", -11.105387687683105 ], [ "▁bags", -11.106058120727539 ], [ "▁Bel", -11.106199264526367 ], [ "▁Engineering", -11.106304168701172 ], [ "▁flower", -11.106647491455078 ], [ "▁gratuit", -11.106670379638672 ], [ "▁smartphone", -11.106780052185059 ], [ "stan", -11.107197761535645 ], [ "spect", -11.10726261138916 ], [ "SL", -11.107282638549805 ], [ "sho", -11.10738754272461 ], [ "▁Ser", -11.10791301727295 ], [ "▁Perhaps", -11.108247756958008 ], [ "▁codes", -11.108342170715332 ], [ "▁Wind", -11.10849666595459 ], [ "aient", -11.108757019042969 ], [ "▁Prin", -11.108802795410156 ], [ "▁(1)", -11.109090805053711 ], [ "▁figures", -11.109450340270996 ], [ "▁ausge", -11.10972785949707 ], [ "▁episode", -11.110050201416016 ], [ "▁Spa", -11.110370635986328 ], [ "▁Silver", -11.110386848449707 ], [ "▁Sky", -11.110396385192871 ], [ "▁capabilities", -11.1107177734375 ], [ "▁Uni", -11.11073112487793 ], [ "▁încă", -11.110876083374023 ], [ "TO", -11.111289978027344 ], [ "▁Hal", -11.111358642578125 ], [ "ghi", -11.111414909362793 ], [ "▁sofa", -11.111438751220703 ], [ "hard", -11.11150074005127 ], [ "▁FOR", -11.111587524414062 ], [ "▁Ber", -11.111820220947266 ], [ "▁firms", -11.11187744140625 ], [ "▁memories", -11.111883163452148 ], [ "▁lift", -11.11214542388916 ], [ "▁sending", -11.11214542388916 ], [ "▁narrow", -11.112646102905273 ], [ "▁Steve", -11.112784385681152 ], [ "▁integration", -11.112905502319336 ], [ "known", -11.113122940063477 ], [ "▁nostru", -11.113237380981445 ], [ "iţi", -11.113422393798828 ], [ "▁Georgia", -11.113759994506836 ], [ "▁slowly", -11.114026069641113 ], [ "iere", -11.114028930664062 ], [ "aka", -11.114255905151367 ], [ "PE", -11.114320755004883 ], [ "▁venue", -11.11468505859375 ], [ "jar", -11.11474609375 ], [ "buch", -11.114755630493164 ], [ "rad", -11.114858627319336 ], [ "▁resistance", -11.114899635314941 ], [ "▁stehen", -11.114914894104004 ], [ "chin", -11.11504077911377 ], [ "▁weak", -11.11535358428955 ], [ "▁DVD", -11.115598678588867 ], [ "▁bodies", -11.115856170654297 ], [ "▁split", -11.115884780883789 ], [ "What", -11.116231918334961 ], [ "setzen", -11.116467475891113 ], [ "▁loves", -11.116561889648438 ], [ "▁kleine", -11.117077827453613 ], [ "▁increasingly", -11.11746883392334 ], [ "▁alert", -11.117583274841309 ], [ "▁AC", -11.117647171020508 ], [ "▁partir", -11.117974281311035 ], [ "▁ratio", -11.11807918548584 ], [ "▁keeps", -11.118539810180664 ], [ "▁Area", -11.118544578552246 ], [ "▁données", -11.119071960449219 ], [ "▁flag", -11.119254112243652 ], [ "▁NO", -11.119277000427246 ], [ "▁hotels", -11.119336128234863 ], [ "▁debut", -11.119365692138672 ], [ "▁suffer", -11.119368553161621 ], [ "▁hidden", -11.119810104370117 ], [ "▁clothing", -11.120074272155762 ], [ "▁household", -11.120235443115234 ], [ "medi", -11.120268821716309 ], [ "▁reste", -11.120274543762207 ], [ "bro", -11.120381355285645 ], [ "▁Bus", -11.120405197143555 ], [ "▁Ken", -11.120572090148926 ], [ "IR", -11.120758056640625 ], [ "▁suffering", -11.121212005615234 ], [ "▁publication", -11.121246337890625 ], [ "▁Mat", -11.121360778808594 ], [ "▁impression", -11.121509552001953 ], [ "▁founded", -11.121562957763672 ], [ "▁stable", -11.121566772460938 ], [ "▁promise", -11.121719360351562 ], [ "▁Cloud", -11.121770858764648 ], [ "▁prison", -11.122099876403809 ], [ "cor", -11.122355461120605 ], [ "▁Sports", -11.122716903686523 ], [ "▁erste", -11.122745513916016 ], [ "shire", -11.122757911682129 ], [ "▁recommendations", -11.122916221618652 ], 
[ "▁permit", -11.123100280761719 ], [ "▁tomorrow", -11.123126983642578 ], [ "▁lucky", -11.123422622680664 ], [ "▁realized", -11.123449325561523 ], [ "▁famille", -11.123473167419434 ], [ "▁Zealand", -11.123542785644531 ], [ "▁wooden", -11.123601913452148 ], [ "▁east", -11.124269485473633 ], [ "▁Bereich", -11.12458324432373 ], [ "während", -11.124653816223145 ], [ "rite", -11.124836921691895 ], [ "▁fla", -11.124902725219727 ], [ "platz", -11.124991416931152 ], [ "▁zero", -11.125292778015137 ], [ "▁priority", -11.12535572052002 ], [ "▁Airport", -11.125506401062012 ], [ "▁Kauf", -11.125590324401855 ], [ "▁ultimate", -11.12601375579834 ], [ "▁chest", -11.126175880432129 ], [ "▁tone", -11.126376152038574 ], [ "▁Kal", -11.126431465148926 ], [ "▁supposed", -11.12669849395752 ], [ "▁vedere", -11.126846313476562 ], [ "▁50%", -11.126872062683105 ], [ "▁Ger", -11.127785682678223 ], [ "pack", -11.127849578857422 ], [ "▁priv", -11.128241539001465 ], [ "▁Kit", -11.128263473510742 ], [ "▁tent", -11.128457069396973 ], [ "▁guidelines", -11.128461837768555 ], [ "▁Republic", -11.128824234008789 ], [ "including", -11.129239082336426 ], [ "▁chief", -11.129615783691406 ], [ "▁Living", -11.129766464233398 ], [ "keit", -11.1298189163208 ], [ "▁convert", -11.129831314086914 ], [ "tail", -11.129928588867188 ], [ "orient", -11.129960060119629 ], [ "eigenen", -11.130245208740234 ], [ "▁soup", -11.130587577819824 ], [ "▁zona", -11.130661010742188 ], [ "▁composition", -11.130690574645996 ], [ "▁Bob", -11.130831718444824 ], [ "▁exception", -11.131170272827148 ], [ "▁cr", -11.131287574768066 ], [ "▁str", -11.131482124328613 ], [ "▁Fl", -11.13178825378418 ], [ "AT", -11.131909370422363 ], [ "kel", -11.132002830505371 ], [ "▁pricing", -11.132189750671387 ], [ "▁Mass", -11.132258415222168 ], [ "vir", -11.132333755493164 ], [ "leg", -11.132448196411133 ], [ "▁rating", -11.132455825805664 ], [ "▁Sale", -11.132628440856934 ], [ "▁somewhere", -11.132866859436035 ], [ "▁submitted", -11.133084297180176 ], [ "▁Pop", -11.133296012878418 ], [ "▁papers", -11.13330364227295 ], [ "▁authorities", -11.133326530456543 ], [ "▁Person", -11.133381843566895 ], [ "▁kill", -11.133512496948242 ], [ "▁suggestions", -11.133548736572266 ], [ "-6", -11.133644104003906 ], [ "▁dust", -11.133750915527344 ], [ "taire", -11.133805274963379 ], [ "▁recognition", -11.133870124816895 ], [ "3.", -11.134047508239746 ], [ "▁Mont", -11.134230613708496 ], [ "▁produit", -11.13430118560791 ], [ "▁transmission", -11.134340286254883 ], [ "▁Th", -11.13475513458252 ], [ "▁passing", -11.134928703308105 ], [ "▁Partner", -11.135161399841309 ], [ "▁dire", -11.135205268859863 ], [ "▁DC", -11.135432243347168 ], [ "▁sky", -11.135659217834473 ], [ "▁Kitchen", -11.135890007019043 ], [ "▁fluid", -11.135929107666016 ], [ "▁scored", -11.136005401611328 ], [ "▁chapter", -11.136100769042969 ], [ "If", -11.136231422424316 ], [ "letzten", -11.136275291442871 ], [ "▁officers", -11.13641357421875 ], [ "▁avem", -11.136631965637207 ], [ "ister", -11.136666297912598 ], [ "▁involves", -11.136688232421875 ], [ "ico", -11.136898040771484 ], [ "bur", -11.137056350708008 ], [ "▁mieux", -11.137064933776855 ], [ "▁Photo", -11.1371431350708 ], [ "▁Cro", -11.137228012084961 ], [ "▁professor", -11.137245178222656 ], [ "▁besonders", -11.137313842773438 ], [ "д", -11.137367248535156 ], [ "▁alongside", -11.137382507324219 ], [ "▁stored", -11.13770580291748 ], [ "▁activ", -11.137849807739258 ], [ "▁setup", -11.138169288635254 ], [ "▁extract", -11.138627052307129 ], [ "▁accent", -11.138633728027344 ], [ 
"▁replaced", -11.138638496398926 ], [ "tec", -11.138800621032715 ], [ "▁Natur", -11.138848304748535 ], [ "▁Pacific", -11.138887405395508 ], [ "▁NY", -11.139485359191895 ], [ "▁Capital", -11.139583587646484 ], [ "▁forest", -11.13969898223877 ], [ "incredibly", -11.14006233215332 ], [ "▁choix", -11.14021110534668 ], [ "▁seriously", -11.140281677246094 ], [ "▁konnte", -11.14030933380127 ], [ "▁2014.", -11.140443801879883 ], [ "ensuring", -11.140534400939941 ], [ "▁handling", -11.140661239624023 ], [ "▁9.", -11.140715599060059 ], [ "▁relations", -11.140876770019531 ], [ "▁Kom", -11.141045570373535 ], [ "▁Hol", -11.141282081604004 ], [ "▁none", -11.141515731811523 ], [ "rob", -11.141718864440918 ], [ "▁Forum", -11.141759872436523 ], [ "hour", -11.141776084899902 ], [ "ème", -11.141809463500977 ], [ "▁Space", -11.141986846923828 ], [ "▁Ham", -11.142992973327637 ], [ "rap", -11.143169403076172 ], [ "▁Michigan", -11.14317512512207 ], [ "km", -11.143202781677246 ], [ "▁utilize", -11.143548965454102 ], [ "lov", -11.143775939941406 ], [ "▁luck", -11.144388198852539 ], [ "lä", -11.144824981689453 ], [ "▁healing", -11.145010948181152 ], [ "▁neu", -11.145182609558105 ], [ "aging", -11.145251274108887 ], [ "▁compliance", -11.145583152770996 ], [ "▁vertical", -11.145675659179688 ], [ "▁FREE", -11.145729064941406 ], [ "▁differences", -11.146014213562012 ], [ "▁Server", -11.146252632141113 ], [ "▁estimated", -11.146378517150879 ], [ "schutz", -11.146692276000977 ], [ "▁notamment", -11.146736145019531 ], [ "▁120", -11.146919250488281 ], [ "72", -11.147282600402832 ], [ "▁heating", -11.147347450256348 ], [ "late", -11.14756965637207 ], [ "▁younger", -11.14783000946045 ], [ "▁Intel", -11.148171424865723 ], [ "▁salad", -11.148362159729004 ], [ "▁commonly", -11.148563385009766 ], [ "▁treatments", -11.148682594299316 ], [ "▁speaker", -11.148770332336426 ], [ "▁producing", -11.149120330810547 ], [ "▁eggs", -11.149367332458496 ], [ "▁Spirit", -11.149892807006836 ], [ "▁beide", -11.149918556213379 ], [ "▁transaction", -11.150283813476562 ], [ "▁Machine", -11.150464057922363 ], [ "▁Games", -11.150527000427246 ], [ "▁niveau", -11.150687217712402 ], [ "▁Need", -11.15082836151123 ], [ "radi", -11.150959968566895 ], [ "mir", -11.15096664428711 ], [ "causing", -11.151000022888184 ], [ "▁début", -11.151042938232422 ], [ "▁rencontre", -11.151063919067383 ], [ "▁threat", -11.151153564453125 ], [ "▁enjoying", -11.151320457458496 ], [ "Com", -11.151386260986328 ], [ "▁Johnson", -11.151555061340332 ], [ "▁tournament", -11.15156364440918 ], [ "▁Micro", -11.151582717895508 ], [ "▁Drive", -11.151667594909668 ], [ "▁Cre", -11.151866912841797 ], [ "▁Lebens", -11.151930809020996 ], [ "▁categories", -11.152358055114746 ], [ "5,000", -11.15261173248291 ], [ "▁confirmed", -11.152617454528809 ], [ "pli", -11.152763366699219 ], [ "▁Francisco", -11.153139114379883 ], [ "▁raw", -11.153157234191895 ], [ "▁managers", -11.153223991394043 ], [ "ţie", -11.153365135192871 ], [ "UR", -11.153368949890137 ], [ "▁aproape", -11.154065132141113 ], [ "via", -11.154606819152832 ], [ "▁engaged", -11.154646873474121 ], [ "▁parti", -11.154741287231445 ], [ "▁posting", -11.15517807006836 ], [ "CO", -11.155484199523926 ], [ "▁bois", -11.155815124511719 ], [ "▁inch", -11.15590763092041 ], [ "vie", -11.156068801879883 ], [ "▁aside", -11.156314849853516 ], [ "▁exceptional", -11.15658950805664 ], [ "▁vintage", -11.156668663024902 ], [ "▁Him", -11.156795501708984 ], [ "▁expansion", -11.156806945800781 ], [ "▁Weg", -11.157122611999512 ], [ "▁authors", 
-11.157535552978516 ], [ "▁deine", -11.15764045715332 ], [ "▁Prime", -11.158016204833984 ], [ "▁scan", -11.158055305480957 ], [ "▁reg", -11.158112525939941 ], [ "ția", -11.158141136169434 ], [ "riv", -11.158258438110352 ], [ "selon", -11.158440589904785 ], [ "▁Studio", -11.158571243286133 ], [ "▁dich", -11.158658027648926 ], [ "▁vi", -11.158745765686035 ], [ "▁sequence", -11.159016609191895 ], [ "▁Four", -11.159046173095703 ], [ "RT", -11.159050941467285 ], [ "▁ihn", -11.159072875976562 ], [ "▁employ", -11.159223556518555 ], [ "umb", -11.159659385681152 ], [ "ită", -11.159818649291992 ], [ "▁Station", -11.159950256347656 ], [ "▁upload", -11.159972190856934 ], [ "▁upgrade", -11.160445213317871 ], [ "▁exterior", -11.160528182983398 ], [ "▁writers", -11.160531997680664 ], [ "▁plot", -11.160543441772461 ], [ "▁Gen", -11.16068172454834 ], [ "TER", -11.160821914672852 ], [ "-12", -11.160930633544922 ], [ "http", -11.162168502807617 ], [ "▁smell", -11.1621732711792 ], [ "post", -11.162522315979004 ], [ "von", -11.162790298461914 ], [ "mili", -11.16280746459961 ], [ "8%", -11.162972450256348 ], [ "▁Andrew", -11.163065910339355 ], [ "▁spun", -11.16321086883545 ], [ "▁grass", -11.163444519042969 ], [ "unter", -11.163474082946777 ], [ "▁burn", -11.16356086730957 ], [ "▁Gegen", -11.163601875305176 ], [ "fest", -11.163721084594727 ], [ "▁Northern", -11.163738250732422 ], [ "▁consumption", -11.163775444030762 ], [ "▁bird", -11.164069175720215 ], [ "▁Miss", -11.164369583129883 ], [ "anti", -11.16447925567627 ], [ "▁viata", -11.164583206176758 ], [ "bereich", -11.164602279663086 ], [ "▁Change", -11.164871215820312 ], [ "▁pouvoir", -11.165255546569824 ], [ "▁demonstrate", -11.165435791015625 ], [ "▁requirement", -11.165483474731445 ], [ "BI", -11.16577434539795 ], [ "ied", -11.166099548339844 ], [ "▁spray", -11.166358947753906 ], [ "▁calitate", -11.166379928588867 ], [ "▁souvent", -11.1665620803833 ], [ "▁samples", -11.166682243347168 ], [ "▁compete", -11.166930198669434 ], [ "ank", -11.166946411132812 ], [ "année", -11.167037963867188 ], [ "wick", -11.167183876037598 ], [ "iff", -11.167254447937012 ], [ "noi", -11.167255401611328 ], [ "ography", -11.167450904846191 ], [ "▁SE", -11.167508125305176 ], [ "▁250", -11.16779899597168 ], [ "▁wealth", -11.167884826660156 ], [ "4%", -11.168235778808594 ], [ "▁swimming", -11.168269157409668 ], [ "enne", -11.168338775634766 ], [ "Qu", -11.168400764465332 ], [ "▁connections", -11.168476104736328 ], [ "onne", -11.16852855682373 ], [ "▁Way", -11.168676376342773 ], [ "voll", -11.168793678283691 ], [ "▁extent", -11.169041633605957 ], [ "▁objective", -11.169572830200195 ], [ "▁clinic", -11.169581413269043 ], [ "NA", -11.169848442077637 ], [ "▁Hope", -11.170098304748535 ], [ "▁coat", -11.170331954956055 ], [ "▁depend", -11.170393943786621 ], [ "▁tine", -11.170463562011719 ], [ "acc", -11.170486450195312 ], [ "▁editor", -11.170598983764648 ], [ "▁Jim", -11.170690536499023 ], [ "600", -11.171262741088867 ], [ "▁module", -11.171302795410156 ], [ "▁deja", -11.171821594238281 ], [ "atur", -11.171841621398926 ], [ "▁maintaining", -11.171918869018555 ], [ "▁hoch", -11.172059059143066 ], [ "▁covering", -11.17239761352539 ], [ "vielen", -11.172450065612793 ], [ "hem", -11.172531127929688 ], [ "▁illegal", -11.172656059265137 ], [ "▁certificate", -11.17329216003418 ], [ "▁collective", -11.173357963562012 ], [ "▁blow", -11.17343807220459 ], [ "▁programming", -11.17343807220459 ], [ "HE", -11.173727989196777 ], [ "▁Division", -11.173842430114746 ], [ "▁ceux", -11.174081802368164 ], [ 
"▁saved", -11.174202919006348 ], [ "▁worst", -11.17426586151123 ], [ "▁arms", -11.17430305480957 ], [ "▁Officer", -11.17463493347168 ], [ "▁association", -11.174838066101074 ], [ "ington", -11.1749906539917 ], [ "▁belle", -11.175024032592773 ], [ "tting", -11.17537784576416 ], [ "▁attacks", -11.175446510314941 ], [ "▁vei", -11.17546558380127 ], [ "▁gerade", -11.175470352172852 ], [ "▁strain", -11.175748825073242 ], [ "▁offices", -11.1759672164917 ], [ "EM", -11.17627239227295 ], [ "EST", -11.176509857177734 ], [ "-8", -11.176758766174316 ], [ "▁faculty", -11.176998138427734 ], [ "▁Plant", -11.177046775817871 ], [ "pla", -11.177295684814453 ], [ "card", -11.177618980407715 ], [ "▁loose", -11.177982330322266 ], [ "▁PR", -11.178044319152832 ], [ "profit", -11.178071022033691 ], [ "▁channels", -11.178119659423828 ], [ "ATE", -11.178257942199707 ], [ "atic", -11.178304672241211 ], [ "wegen", -11.178404808044434 ], [ "word", -11.178621292114258 ], [ "▁sehen", -11.178659439086914 ], [ "▁nombre", -11.178744316101074 ], [ "▁DO", -11.178763389587402 ], [ "▁hoping", -11.178949356079102 ], [ "▁wollen", -11.179091453552246 ], [ "▁decat", -11.179244995117188 ], [ "IF", -11.179386138916016 ], [ "▁permission", -11.179396629333496 ], [ "▁Williams", -11.179936408996582 ], [ "▁beer", -11.179962158203125 ], [ "▁dernière", -11.180052757263184 ], [ "▁purchasing", -11.18025016784668 ], [ "▁pride", -11.180416107177734 ], [ "solv", -11.180598258972168 ], [ "ego", -11.180691719055176 ], [ "▁Oil", -11.18079662322998 ], [ "▁dishes", -11.18102741241455 ], [ "▁Baby", -11.181109428405762 ], [ "▁Roll", -11.181137084960938 ], [ "vez", -11.18134593963623 ], [ "▁drept", -11.181367874145508 ], [ "lly", -11.18148136138916 ], [ "▁potrivit", -11.181495666503906 ], [ "person", -11.181961059570312 ], [ "▁interactive", -11.182269096374512 ], [ "▁brilliant", -11.182304382324219 ], [ "▁000", -11.182357788085938 ], [ "▁giant", -11.182657241821289 ], [ "▁plain", -11.182945251464844 ], [ "▁lock", -11.183197975158691 ], [ "▁inspection", -11.183762550354004 ], [ "▁symbol", -11.18392276763916 ], [ "▁Gal", -11.183953285217285 ], [ "▁concepts", -11.1840181350708 ], [ "▁venture", -11.18411922454834 ], [ "▁Tr", -11.184402465820312 ], [ "▁Color", -11.184469223022461 ], [ "▁behalf", -11.184635162353516 ], [ "ink", -11.184715270996094 ], [ "atii", -11.1848726272583 ], [ "wie", -11.184907913208008 ], [ "▁stream", -11.18514347076416 ], [ "▁buyers", -11.185192108154297 ], [ "legen", -11.185526847839355 ], [ "iness", -11.18578815460205 ], [ "▁absolute", -11.185945510864258 ], [ "▁council", -11.186067581176758 ], [ "▁displayed", -11.186172485351562 ], [ "▁Bun", -11.186405181884766 ], [ "▁darauf", -11.186585426330566 ], [ "▁rod", -11.186829566955566 ], [ "▁repeat", -11.186898231506348 ], [ "quelle", -11.187023162841797 ], [ "lation", -11.187433242797852 ], [ "gul", -11.18774700164795 ], [ "▁compensation", -11.188064575195312 ], [ "▁string", -11.1881685256958 ], [ "▁joining", -11.188251495361328 ], [ "▁Pra", -11.188429832458496 ], [ "hab", -11.188936233520508 ], [ "▁plane", -11.189024925231934 ], [ "▁conversion", -11.189078330993652 ], [ "▁lesson", -11.189361572265625 ], [ "bound", -11.1893949508667 ], [ "▁seats", -11.18946361541748 ], [ "voc", -11.189902305603027 ], [ "▁Disney", -11.190120697021484 ], [ "esse", -11.190277099609375 ], [ "▁awards", -11.190279006958008 ], [ "▁initiative", -11.190483093261719 ], [ "UM", -11.19050407409668 ], [ "▁intelligence", -11.190763473510742 ], [ "▁laser", -11.191128730773926 ], [ "än", -11.191228866577148 ], [ 
"▁generated", -11.191231727600098 ], [ "▁allen", -11.19186782836914 ], [ "▁Aug", -11.19261360168457 ], [ "lini", -11.192968368530273 ], [ "▁Update", -11.193015098571777 ], [ "▁grab", -11.193095207214355 ], [ "▁Bridge", -11.193219184875488 ], [ "rock", -11.193289756774902 ], [ "hold", -11.193461418151855 ], [ "seinen", -11.193643569946289 ], [ "▁false", -11.193758010864258 ], [ "type", -11.193792343139648 ], [ "▁outcome", -11.193906784057617 ], [ "▁crazy", -11.194161415100098 ], [ "▁Platz", -11.194281578063965 ], [ "▁believed", -11.194426536560059 ], [ "▁adjust", -11.194503784179688 ], [ "▁entrance", -11.194644927978516 ], [ "▁Colorado", -11.194751739501953 ], [ "▁concentration", -11.194865226745605 ], [ "aid", -11.194958686828613 ], [ "▁regardless", -11.195035934448242 ], [ "▁mici", -11.195063591003418 ], [ "▁potentially", -11.195109367370605 ], [ "▁Custom", -11.195867538452148 ], [ "rag", -11.196009635925293 ], [ "▁employer", -11.19604206085205 ], [ "tagged", -11.196158409118652 ], [ "▁34", -11.196271896362305 ], [ "fro", -11.196895599365234 ], [ "▁Pas", -11.197010040283203 ], [ "▁AS", -11.197013854980469 ], [ "PP", -11.197031021118164 ], [ "stru", -11.19741439819336 ], [ "grâce", -11.198037147521973 ], [ "▁anyway", -11.198240280151367 ], [ "▁streets", -11.1986083984375 ], [ "▁Region", -11.199190139770508 ], [ "▁newly", -11.199280738830566 ], [ "▁assistant", -11.199461936950684 ], [ "▁requests", -11.199618339538574 ], [ "▁Ohio", -11.199705123901367 ], [ "▁continuing", -11.200072288513184 ], [ "▁îm", -11.200136184692383 ], [ "7%", -11.20031452178955 ], [ "▁basically", -11.200325965881348 ], [ "gabe", -11.200334548950195 ], [ "▁ultra", -11.200355529785156 ], [ "pic", -11.200571060180664 ], [ "▁jeder", -11.200939178466797 ], [ "▁Cook", -11.201225280761719 ], [ "▁tie", -11.201227188110352 ], [ "▁yard", -11.20151424407959 ], [ "▁wash", -11.20152759552002 ], [ "▁3,", -11.20194149017334 ], [ "▁exista", -11.202128410339355 ], [ "▁egg", -11.202342987060547 ], [ "▁marché", -11.202616691589355 ], [ "kommen", -11.202630996704102 ], [ "▁Select", -11.202999114990234 ], [ "geben", -11.203126907348633 ], [ "▁Joseph", -11.203531265258789 ], [ "▁Ces", -11.203642845153809 ], [ "▁hundred", -11.203676223754883 ], [ "even", -11.203792572021484 ], [ "gal", -11.204232215881348 ], [ "800", -11.20443058013916 ], [ "▁Jones", -11.204599380493164 ], [ "ova", -11.204681396484375 ], [ "▁careful", -11.204727172851562 ], [ "▁alarm", -11.205070495605469 ], [ "NI", -11.205113410949707 ], [ "▁residence", -11.205327987670898 ], [ "▁wäre", -11.20590877532959 ], [ "▁Dor", -11.205986976623535 ], [ "▁amounts", -11.206369400024414 ], [ "▁mistake", -11.206687927246094 ], [ "ates", -11.206796646118164 ], [ "▁bune", -11.206951141357422 ], [ "▁vegetables", -11.207124710083008 ], [ "▁Ann", -11.207204818725586 ], [ "logical", -11.20776081085205 ], [ "stadt", -11.207806587219238 ], [ "▁chances", -11.207921981811523 ], [ "%)", -11.208030700683594 ], [ "▁minimal", -11.20810604095459 ], [ "▁naturally", -11.20817756652832 ], [ "▁Geld", -11.20822525024414 ], [ "▁Yu", -11.208361625671387 ], [ "▁wrap", -11.20840072631836 ], [ "rest", -11.208674430847168 ], [ "▁legs", -11.208758354187012 ], [ "PM", -11.208806991577148 ], [ "▁Heart", -11.208888053894043 ], [ "▁suspect", -11.209020614624023 ], [ "Go", -11.209098815917969 ], [ "▁Fil", -11.209175109863281 ], [ "▁YOU", -11.209175109863281 ], [ "▁victory", -11.209245681762695 ], [ "pun", -11.20960807800293 ], [ "▁Zo", -11.209632873535156 ], [ "CT", -11.209640502929688 ], [ "▁trim", -11.20969009399414 
], [ "▁stuck", -11.209836959838867 ], [ "ators", -11.209877014160156 ], [ "▁Ideas", -11.210016250610352 ], [ "▁voyage", -11.210166931152344 ], [ "▁Restaurant", -11.210205078125 ], [ "▁pat", -11.210234642028809 ], [ "▁bond", -11.210521697998047 ], [ "▁Del", -11.210552215576172 ], [ "▁fighting", -11.210705757141113 ], [ "▁concerning", -11.210867881774902 ], [ "▁etwa", -11.211141586303711 ], [ "▁Thema", -11.211237907409668 ], [ "▁preferred", -11.211423873901367 ], [ "▁pitch", -11.211465835571289 ], [ "▁Singapore", -11.211971282958984 ], [ "▁tub", -11.212018013000488 ], [ "FT", -11.212053298950195 ], [ "▁Product", -11.21212100982666 ], [ "▁applying", -11.212285995483398 ], [ "▁Fr", -11.212340354919434 ], [ "ţa", -11.212599754333496 ], [ "▁iPad", -11.212861061096191 ], [ "PD", -11.2129545211792 ], [ "▁comun", -11.212995529174805 ], [ "▁pie", -11.213286399841309 ], [ "rank", -11.21364688873291 ], [ "tron", -11.213677406311035 ], [ "▁pest", -11.213906288146973 ], [ "▁herself", -11.213936805725098 ], [ "▁intense", -11.213964462280273 ], [ "foot", -11.21413803100586 ], [ "▁1998", -11.2141695022583 ], [ "▁anxiety", -11.214616775512695 ], [ "▁portable", -11.214674949645996 ], [ "▁harm", -11.214735984802246 ], [ "▁admit", -11.214885711669922 ], [ "sted", -11.214900016784668 ], [ "▁regions", -11.215450286865234 ], [ "cie", -11.215556144714355 ], [ "▁robust", -11.21577262878418 ], [ "▁stem", -11.215982437133789 ], [ "▁roles", -11.216024398803711 ], [ "▁Latin", -11.216224670410156 ], [ "▁Ré", -11.216378211975098 ], [ "▁ref", -11.216381072998047 ], [ "isme", -11.216426849365234 ], [ "▁contribution", -11.216776847839355 ], [ "▁forever", -11.217447280883789 ], [ "▁frei", -11.21754264831543 ], [ "▁mont", -11.217818260192871 ], [ "that", -11.217999458312988 ], [ "▁sensitive", -11.218116760253906 ], [ "▁wider", -11.218175888061523 ], [ "AF", -11.218234062194824 ], [ "▁liability", -11.218748092651367 ], [ "ţiei", -11.219043731689453 ], [ "▁Cho", -11.219260215759277 ], [ "aria", -11.21960735321045 ], [ "rang", -11.21977710723877 ], [ "▁Account", -11.21986198425293 ], [ "▁III", -11.219941139221191 ], [ "▁tooth", -11.220222473144531 ], [ "▁factory", -11.220240592956543 ], [ "▁dropped", -11.220495223999023 ], [ "horn", -11.220780372619629 ], [ "RP", -11.221110343933105 ], [ "▁container", -11.22118091583252 ], [ "fran", -11.221474647521973 ], [ "▁lawyer", -11.221842765808105 ], [ "▁Image", -11.221907615661621 ], [ "HO", -11.22195816040039 ], [ "▁incorporate", -11.221992492675781 ], [ "▁lume", -11.22226333618164 ], [ "GA", -11.222331047058105 ], [ "itati", -11.222370147705078 ], [ "autre", -11.222665786743164 ], [ "ierten", -11.222688674926758 ], [ "[", -11.222746849060059 ], [ "▁packages", -11.222758293151855 ], [ "▁Simon", -11.22290325164795 ], [ "▁somewhat", -11.223734855651855 ], [ "mbo", -11.223737716674805 ], [ "lite", -11.223844528198242 ], [ "▁eliminate", -11.22395133972168 ], [ "▁decrease", -11.224117279052734 ], [ "▁geben", -11.224214553833008 ], [ "▁approaches", -11.224482536315918 ], [ "▁tissue", -11.224940299987793 ], [ "▁personne", -11.225192070007324 ], [ "ional", -11.225587844848633 ], [ "unable", -11.2256498336792 ], [ "▁Case", -11.225736618041992 ], [ "hill", -11.225744247436523 ], [ "och", -11.225862503051758 ], [ "▁minister", -11.225920677185059 ], [ "▁Rad", -11.226285934448242 ], [ "▁yoga", -11.226390838623047 ], [ "▁encounter", -11.22661018371582 ], [ "text", -11.22670841217041 ], [ "▁OS", -11.226719856262207 ], [ "▁opera", -11.22673225402832 ], [ "▁loving", -11.226977348327637 ], [ "▁birds", 
-11.227363586425781 ], [ "▁prim", -11.227389335632324 ], [ "easca", -11.227432250976562 ], [ "park", -11.227453231811523 ], [ "fü", -11.227797508239746 ], [ "▁champion", -11.227824211120605 ], [ "▁warning", -11.228245735168457 ], [ "DC", -11.228271484375 ], [ "▁yield", -11.228310585021973 ], [ "raum", -11.228334426879883 ], [ "▁Student", -11.228434562683105 ], [ "▁Rev", -11.22848892211914 ], [ "▁Fu", -11.228501319885254 ], [ "▁intra", -11.22854232788086 ], [ "▁proces", -11.228585243225098 ], [ "▁margin", -11.228621482849121 ], [ "lands", -11.228816986083984 ], [ "04", -11.228952407836914 ], [ "▁Steel", -11.229897499084473 ], [ "▁besoin", -11.230081558227539 ], [ "şti", -11.230561256408691 ], [ "▁39", -11.230635643005371 ], [ "▁outcomes", -11.230677604675293 ], [ "wert", -11.230719566345215 ], [ "3,", -11.23080062866211 ], [ "▁hole", -11.230888366699219 ], [ "▁Create", -11.23096752166748 ], [ "▁hall", -11.231266975402832 ], [ "nach", -11.231595039367676 ], [ "▁indicate", -11.232311248779297 ], [ "cum", -11.232604026794434 ], [ "▁Mann", -11.232690811157227 ], [ "▁reaction", -11.232828140258789 ], [ "▁empty", -11.23289680480957 ], [ "▁Sign", -11.232941627502441 ], [ "▁pm", -11.23300838470459 ], [ "erung", -11.23322582244873 ], [ "▁würde", -11.233592987060547 ], [ "▁declarat", -11.233602523803711 ], [ "6%", -11.23371410369873 ], [ "▁Client", -11.23377513885498 ], [ "vil", -11.234295845031738 ], [ "▁electricity", -11.234469413757324 ], [ "▁75", -11.234505653381348 ], [ "▁buna", -11.234505653381348 ], [ "eşte", -11.23473834991455 ], [ "▁prop", -11.234792709350586 ], [ "▁journal", -11.234883308410645 ], [ "▁meu", -11.23495101928711 ], [ "▁chef", -11.235034942626953 ], [ "▁Ever", -11.235102653503418 ], [ "▁feelings", -11.235466003417969 ], [ "PT", -11.23551082611084 ], [ "▁proposal", -11.235651969909668 ], [ "▁Its", -11.235709190368652 ], [ "▁2013.", -11.235795974731445 ], [ "▁Bundes", -11.23595142364502 ], [ "▁droit", -11.236333847045898 ], [ "▁10%", -11.236671447753906 ], [ "gard", -11.236772537231445 ], [ "information", -11.236814498901367 ], [ "FE", -11.237309455871582 ], [ "▁Dun", -11.237340927124023 ], [ "▁Stock", -11.237472534179688 ], [ "ație", -11.2374849319458 ], [ "▁mag", -11.237603187561035 ], [ "▁br", -11.237665176391602 ], [ "▁sight", -11.237772941589355 ], [ "phone", -11.237796783447266 ], [ "▁Cy", -11.237811088562012 ], [ "▁opposite", -11.238035202026367 ], [ "ically", -11.238235473632812 ], [ "großen", -11.238388061523438 ], [ "▁Without", -11.23845100402832 ], [ "espace", -11.238515853881836 ], [ "▁chairs", -11.238595008850098 ], [ "▁matches", -11.238685607910156 ], [ "ateur", -11.238697052001953 ], [ "▁Cost", -11.238699913024902 ], [ "▁WordPress", -11.238880157470703 ], [ "▁Opera", -11.239195823669434 ], [ "walked", -11.239234924316406 ], [ "▁transactions", -11.239521026611328 ], [ "▁nuclear", -11.239579200744629 ], [ "ways", -11.239594459533691 ], [ "▁Oct", -11.239738464355469 ], [ "▁bomb", -11.239835739135742 ], [ "▁tracking", -11.239879608154297 ], [ "▁photograph", -11.240066528320312 ], [ "bio", -11.240309715270996 ], [ "▁branch", -11.240363121032715 ], [ "▁$5", -11.240684509277344 ], [ "▁diagram", -11.240986824035645 ], [ "▁Hard", -11.241218566894531 ], [ "bach", -11.241232872009277 ], [ "▁42", -11.241249084472656 ], [ "logy", -11.241472244262695 ], [ "▁tile", -11.241593360900879 ], [ "▁API", -11.241833686828613 ], [ "seront", -11.24204158782959 ], [ "ENT", -11.242156982421875 ], [ "▁accommodation", -11.242409706115723 ], [ "▁fiber", -11.242438316345215 ], [ "▁Give", 
-11.242792129516602 ], [ "▁Gas", -11.242916107177734 ], [ "▁Spain", -11.243086814880371 ], [ "▁listing", -11.24312686920166 ], [ "▁blocks", -11.24349308013916 ], [ "▁constitu", -11.243762969970703 ], [ "▁convenience", -11.243797302246094 ], [ "▁prize", -11.243823051452637 ], [ "▁aircraft", -11.24404239654541 ], [ "containing", -11.244124412536621 ], [ "▁vice", -11.244247436523438 ], [ "▁organisations", -11.244304656982422 ], [ "▁complicated", -11.244588851928711 ], [ "rons", -11.244647979736328 ], [ "▁bars", -11.244670867919922 ], [ "était", -11.244705200195312 ], [ "▁checking", -11.245287895202637 ], [ "vant", -11.245542526245117 ], [ "▁couch", -11.245657920837402 ], [ "▁brush", -11.245870590209961 ], [ "▁printer", -11.245922088623047 ], [ "▁Rat", -11.246051788330078 ], [ "▁announce", -11.246057510375977 ], [ "▁salari", -11.246200561523438 ], [ "▁Sk", -11.246356964111328 ], [ "pal", -11.246383666992188 ], [ "▁yards", -11.24658203125 ], [ "▁flexibility", -11.246652603149414 ], [ "▁jamais", -11.24670696258545 ], [ "UC", -11.246740341186523 ], [ "▁4,", -11.246793746948242 ], [ "▁Made", -11.247078895568848 ], [ "▁solche", -11.247113227844238 ], [ "▁tri", -11.247237205505371 ], [ "▁outfit", -11.247243881225586 ], [ "м", -11.247267723083496 ], [ "▁encouraged", -11.247477531433105 ], [ "trac", -11.247552871704102 ], [ "▁genetic", -11.24755859375 ], [ "▁beneficial", -11.247747421264648 ], [ "mă", -11.247849464416504 ], [ "involving", -11.247879028320312 ], [ "▁knee", -11.247879028320312 ], [ "▁respective", -11.248316764831543 ], [ "▁controlled", -11.248350143432617 ], [ "▁Rück", -11.24837589263916 ], [ "LC", -11.248592376708984 ], [ "▁highlight", -11.248634338378906 ], [ "chem", -11.248797416687012 ], [ "▁Bis", -11.24956226348877 ], [ "▁graphics", -11.249592781066895 ], [ "▁posibil", -11.249672889709473 ], [ "orul", -11.249682426452637 ], [ "imagin", -11.249836921691895 ], [ "▁draft", -11.250006675720215 ], [ "shaped", -11.250219345092773 ], [ "▁suggests", -11.250221252441406 ], [ "uvre", -11.250509262084961 ], [ "page", -11.250545501708984 ], [ "▁sentiment", -11.250685691833496 ], [ "▁loop", -11.251015663146973 ], [ "▁Quality", -11.251839637756348 ], [ "▁volunteers", -11.251869201660156 ], [ "▁representation", -11.251923561096191 ], [ "▁examination", -11.252134323120117 ], [ "▁(2)", -11.252225875854492 ], [ "assi", -11.252435684204102 ], [ "▁till", -11.252486228942871 ], [ "▁Catholic", -11.252618789672852 ], [ "▁2020", -11.252726554870605 ], [ "▁random", -11.252764701843262 ], [ "tage", -11.253146171569824 ], [ "▁baking", -11.253690719604492 ], [ "▁Musik", -11.253852844238281 ], [ "▁SC", -11.253867149353027 ], [ "▁möchte", -11.254390716552734 ], [ "▁gene", -11.254411697387695 ], [ "▁kam", -11.254928588867188 ], [ "▁inspire", -11.254974365234375 ], [ "unk", -11.255097389221191 ], [ "▁Final", -11.255477905273438 ], [ "▁jeden", -11.255497932434082 ], [ "▁LLC", -11.255962371826172 ], [ "▁sistem", -11.25613784790039 ], [ "▁stages", -11.256441116333008 ], [ "▁texture", -11.256613731384277 ], [ "rib", -11.256739616394043 ], [ "lung", -11.256782531738281 ], [ "▁breath", -11.256814002990723 ], [ "▁hosted", -11.256844520568848 ], [ "▁Kingdom", -11.257079124450684 ], [ "▁politics", -11.257121086120605 ], [ "▁mood", -11.257122993469238 ], [ "cam", -11.257285118103027 ], [ "▁liked", -11.257287979125977 ], [ "▁Credit", -11.257304191589355 ], [ "tisch", -11.257527351379395 ], [ "▁everywhere", -11.257692337036133 ], [ "▁poti", -11.257915496826172 ], [ "▁fruits", -11.258264541625977 ], [ "oire", 
-11.258322715759277 ], [ "▁mesure", -11.258586883544922 ], [ "▁Studies", -11.258838653564453 ], [ "▁provision", -11.25888729095459 ], [ "▁Maria", -11.258927345275879 ], [ "▁necessarily", -11.259103775024414 ], [ "▁Net", -11.259212493896484 ], [ "▁scar", -11.259307861328125 ], [ "▁tracks", -11.259424209594727 ], [ "▁ads", -11.259856224060059 ], [ "termin", -11.259861946105957 ], [ "▁Yo", -11.26022720336914 ], [ "atory", -11.260252952575684 ], [ "itoare", -11.26025676727295 ], [ "▁colours", -11.260563850402832 ], [ "▁correctly", -11.260817527770996 ], [ "▁Trade", -11.26090145111084 ], [ "▁Week", -11.261052131652832 ], [ "▁Premier", -11.261499404907227 ], [ "▁designers", -11.261600494384766 ], [ "▁BE", -11.261879920959473 ], [ "▁desktop", -11.261929512023926 ], [ "▁lifetime", -11.262046813964844 ], [ "▁Kind", -11.26213264465332 ], [ "▁divers", -11.262246131896973 ], [ "rain", -11.262260437011719 ], [ "▁Von", -11.262263298034668 ], [ "▁bal", -11.262568473815918 ], [ "▁shots", -11.262624740600586 ], [ "▁accommodate", -11.262767791748047 ], [ "▁Paper", -11.263001441955566 ], [ "▁interaction", -11.263191223144531 ], [ "▁acquisition", -11.263233184814453 ], [ "▁neuro", -11.26378345489502 ], [ "▁institution", -11.26391887664795 ], [ "▁automatic", -11.26403522491455 ], [ "▁assess", -11.264177322387695 ], [ "▁manifest", -11.264199256896973 ], [ "▁audit", -11.264202117919922 ], [ "▁câte", -11.264406204223633 ], [ "▁insight", -11.264533996582031 ], [ "▁lange", -11.264781951904297 ], [ "▁retirement", -11.264795303344727 ], [ "sons", -11.264864921569824 ], [ "▁Asian", -11.26492691040039 ], [ "▁rail", -11.264978408813477 ], [ "▁Awards", -11.264982223510742 ], [ "Avec", -11.265035629272461 ], [ "SO", -11.26511287689209 ], [ "para", -11.265304565429688 ], [ "▁tant", -11.265562057495117 ], [ "▁strike", -11.265693664550781 ], [ "▁transformation", -11.265742301940918 ], [ "▁leicht", -11.26586627960205 ], [ "л", -11.265996932983398 ], [ "fat", -11.26629638671875 ], [ "▁Qui", -11.266626358032227 ], [ "▁chip", -11.26663589477539 ], [ "titude", -11.266640663146973 ], [ "▁Projekt", -11.266998291015625 ], [ "▁statt", -11.267010688781738 ], [ "▁findet", -11.267184257507324 ], [ "▁telephone", -11.267251968383789 ], [ "▁staying", -11.267267227172852 ], [ "▁Mess", -11.267353057861328 ], [ "▁patio", -11.267382621765137 ], [ "▁afla", -11.267890930175781 ], [ "▁administrative", -11.267910957336426 ], [ "▁gemeinsam", -11.268129348754883 ], [ "▁suppliers", -11.268136024475098 ], [ "ark", -11.268181800842285 ], [ "▁rice", -11.268397331237793 ], [ "▁stretch", -11.268439292907715 ], [ "▁compact", -11.268651008605957 ], [ "fire", -11.268756866455078 ], [ "в", -11.268963813781738 ], [ "vision", -11.269035339355469 ], [ "▁Mag", -11.269368171691895 ], [ "▁dreams", -11.269472122192383 ], [ "▁funny", -11.26968765258789 ], [ "▁lässt", -11.270216941833496 ], [ "cade", -11.270448684692383 ], [ "▁drama", -11.270484924316406 ], [ "▁schimb", -11.270767211914062 ], [ "PO", -11.270785331726074 ], [ "▁Sim", -11.270806312561035 ], [ "▁motivation", -11.271045684814453 ], [ "▁presents", -11.27138614654541 ], [ "▁1997", -11.271828651428223 ], [ "agi", -11.271883010864258 ], [ "▁optimal", -11.27198314666748 ], [ "▁folder", -11.271995544433594 ], [ "stro", -11.272034645080566 ], [ "▁Han", -11.272072792053223 ], [ "▁Ei", -11.27220344543457 ], [ "▁pus", -11.272356986999512 ], [ "▁Learning", -11.272531509399414 ], [ "oop", -11.272603034973145 ], [ "▁Type", -11.272658348083496 ], [ "space", -11.272665023803711 ], [ "▁define", -11.273098945617676 ], [ 
"▁plug", -11.273098945617676 ], [ "yard", -11.273188591003418 ], [ "▁utility", -11.273297309875488 ], [ "über", -11.273561477661133 ], [ "▁commun", -11.273627281188965 ], [ "▁directed", -11.273842811584473 ], [ "▁consent", -11.273893356323242 ], [ "▁DNA", -11.274068832397461 ], [ "▁statements", -11.274130821228027 ], [ "real", -11.274298667907715 ], [ "active", -11.274430274963379 ], [ "school", -11.274965286254883 ], [ "▁mic", -11.275360107421875 ], [ "▁acestui", -11.275467872619629 ], [ "scale", -11.27550220489502 ], [ "▁Mid", -11.275628089904785 ], [ "▁Chair", -11.275874137878418 ], [ "к", -11.275936126708984 ], [ "▁Bas", -11.27630615234375 ], [ "▁38", -11.276379585266113 ], [ "erin", -11.276461601257324 ], [ "▁Everyone", -11.27686882019043 ], [ "COM", -11.276907920837402 ], [ "▁chronic", -11.277079582214355 ], [ "▁doctors", -11.277222633361816 ], [ "▁sh", -11.277276039123535 ], [ "sport", -11.27740478515625 ], [ "▁volunteer", -11.277512550354004 ], [ "▁drinking", -11.277839660644531 ], [ "▁Mas", -11.277868270874023 ], [ "▁pursue", -11.2780122756958 ], [ "▁exposed", -11.278536796569824 ], [ "exe", -11.278660774230957 ], [ "hung", -11.278841972351074 ], [ "▁Tier", -11.278921127319336 ], [ "▁plac", -11.279121398925781 ], [ "▁proiect", -11.279136657714844 ], [ "▁literally", -11.279288291931152 ], [ "▁acolo", -11.279412269592285 ], [ "▁User", -11.279485702514648 ], [ "UT", -11.279598236083984 ], [ "▁hyper", -11.279623985290527 ], [ "▁seed", -11.279794692993164 ], [ "▁literature", -11.2802734375 ], [ "▁Holy", -11.280373573303223 ], [ "▁jeu", -11.280396461486816 ], [ "▁licensed", -11.280896186828613 ], [ "station", -11.280900955200195 ], [ "▁criteria", -11.281292915344238 ], [ "▁sufficient", -11.281292915344238 ], [ "▁gestion", -11.281512260437012 ], [ "▁pic", -11.281549453735352 ], [ "▁64", -11.28170108795166 ], [ "▁facts", -11.281905174255371 ], [ "▁Bild", -11.282098770141602 ], [ "obi", -11.28212833404541 ], [ "▁nie", -11.282362937927246 ], [ "▁Jewish", -11.282756805419922 ], [ "bor", -11.28281307220459 ], [ "▁1980", -11.28286361694336 ], [ "▁Fach", -11.282917976379395 ], [ "craft", -11.283047676086426 ], [ "▁Pakistan", -11.283408164978027 ], [ "▁Mos", -11.283621788024902 ], [ "▁toilet", -11.283844947814941 ], [ "partea", -11.28391170501709 ], [ "case", -11.284221649169922 ], [ "▁clock", -11.28430461883545 ], [ "▁parc", -11.284602165222168 ], [ "▁legislation", -11.284692764282227 ], [ "▁icon", -11.284933090209961 ], [ "etz", -11.285178184509277 ], [ "ept", -11.285270690917969 ], [ "▁Corporation", -11.28585433959961 ], [ "▁requested", -11.285983085632324 ], [ "▁column", -11.286088943481445 ], [ "rier", -11.286120414733887 ], [ "uß", -11.2861967086792 ], [ "▁wohl", -11.286418914794922 ], [ "tell", -11.286569595336914 ], [ "gno", -11.286608695983887 ], [ "▁diseases", -11.286726951599121 ], [ "Sch", -11.286762237548828 ], [ "▁colon", -11.287075996398926 ], [ "▁Based", -11.28709602355957 ], [ "▁flu", -11.28725528717041 ], [ "▁vocal", -11.287408828735352 ], [ "▁virus", -11.287693977355957 ], [ "▁traveling", -11.287750244140625 ], [ "bul", -11.287837982177734 ], [ "т", -11.28794002532959 ], [ "city", -11.287961959838867 ], [ "AU", -11.287991523742676 ], [ "wide", -11.288037300109863 ], [ "▁solo", -11.288061141967773 ], [ "▁functionality", -11.288214683532715 ], [ "▁reveal", -11.28831672668457 ], [ "sign", -11.288952827453613 ], [ "▁closing", -11.288971900939941 ], [ "▁peak", -11.289087295532227 ], [ "▁practic", -11.289398193359375 ], [ "than", -11.289473533630371 ], [ "▁driven", 
-11.289484977722168 ], [ "êtes", -11.289548873901367 ], [ "high", -11.290016174316406 ], [ "power", -11.290226936340332 ], [ "▁Lin", -11.29028606414795 ], [ "▁dose", -11.29034423828125 ], [ "▁pocket", -11.290650367736816 ], [ "▁Classic", -11.29067611694336 ], [ "▁packaging", -11.290792465209961 ], [ "▁distinct", -11.290800094604492 ], [ "▁côté", -11.291094779968262 ], [ "▁breast", -11.29127025604248 ], [ "▁folosit", -11.29133129119873 ], [ "▁drinks", -11.291353225708008 ], [ "▁Dog", -11.291529655456543 ], [ "ailleurs", -11.291658401489258 ], [ "▁caz", -11.291804313659668 ], [ "▁escape", -11.29188346862793 ], [ "▁warranty", -11.291902542114258 ], [ "▁pulled", -11.291996955871582 ], [ "data", -11.292088508605957 ], [ "▁facilitate", -11.292213439941406 ], [ "É", -11.292335510253906 ], [ "▁SP", -11.292403221130371 ], [ "lant", -11.292557716369629 ], [ "AD", -11.29256534576416 ], [ "▁Print", -11.292802810668945 ], [ "mond", -11.292863845825195 ], [ "▁strange", -11.292875289916992 ], [ "▁Hor", -11.293227195739746 ], [ "▁Collection", -11.293328285217285 ], [ "arm", -11.29346752166748 ], [ "cas", -11.293691635131836 ], [ "arrow", -11.29379940032959 ], [ "▁carrying", -11.293927192687988 ], [ "▁wave", -11.294661521911621 ], [ "setzt", -11.294907569885254 ], [ "▁construct", -11.29514217376709 ], [ "▁acts", -11.295269966125488 ], [ "▁Action", -11.295342445373535 ], [ "▁Kim", -11.295354843139648 ], [ "oxid", -11.295459747314453 ], [ "fish", -11.295519828796387 ], [ "▁damaged", -11.295660018920898 ], [ "▁Greek", -11.295747756958008 ], [ "▁belt", -11.295772552490234 ], [ "▁Prior", -11.295778274536133 ], [ "▁marks", -11.295936584472656 ], [ "▁lumea", -11.296183586120605 ], [ "▁twenty", -11.296196937561035 ], [ "▁locul", -11.296360969543457 ], [ "▁Army", -11.296524047851562 ], [ "apt", -11.296602249145508 ], [ "▁limits", -11.296733856201172 ], [ "▁cruise", -11.296966552734375 ], [ "▁List", -11.296998023986816 ], [ "utilisation", -11.29753589630127 ], [ "▁personality", -11.297622680664062 ], [ "▁sections", -11.297759056091309 ], [ "▁drawn", -11.29797649383545 ], [ "▁mold", -11.298277854919434 ], [ "▁Think", -11.298333168029785 ], [ "▁holidays", -11.298355102539062 ], [ "▁critic", -11.298545837402344 ], [ "grade", -11.298660278320312 ], [ "▁sick", -11.299074172973633 ], [ "▁characteristics", -11.299237251281738 ], [ "▁echipa", -11.299272537231445 ], [ "▁Fast", -11.29929256439209 ], [ "▁Br", -11.299600601196289 ], [ "▁Reise", -11.299734115600586 ], [ "teen", -11.299749374389648 ], [ "uci", -11.299949645996094 ], [ "!”", -11.300180435180664 ], [ "ppe", -11.300532341003418 ], [ "▁talked", -11.301164627075195 ], [ "▁gap", -11.301473617553711 ], [ "homme", -11.301778793334961 ], [ "▁interact", -11.301934242248535 ], [ "▁dollar", -11.302276611328125 ], [ "▁bone", -11.302309036254883 ], [ "▁Einsatz", -11.302343368530273 ], [ "▁sad", -11.302434921264648 ], [ "any", -11.302445411682129 ], [ "tation", -11.302666664123535 ], [ "▁Haupt", -11.302748680114746 ], [ "iva", -11.302781105041504 ], [ "▁Schu", -11.302916526794434 ], [ "▁evaluate", -11.3036470413208 ], [ "▁variant", -11.303807258605957 ], [ "▁IS", -11.303879737854004 ], [ "▁PRO", -11.303947448730469 ], [ "▁vine", -11.303959846496582 ], [ "rut", -11.304062843322754 ], [ "▁existence", -11.30443286895752 ], [ "-7", -11.304525375366211 ], [ "ancy", -11.304702758789062 ], [ "▁Want", -11.305023193359375 ], [ "alism", -11.305127143859863 ], [ "ranging", -11.30550765991211 ], [ "preis", -11.305551528930664 ], [ "All", -11.305620193481445 ], [ "▁reception", 
-11.30565071105957 ], [ "mai", -11.305730819702148 ], [ "▁lease", -11.30577278137207 ], [ "▁finest", -11.30578899383545 ], [ "▁evident", -11.305874824523926 ], [ "▁Easy", -11.306075096130371 ], [ "▁gilt", -11.306085586547852 ], [ "▁trips", -11.306344985961914 ], [ "▁skilled", -11.306368827819824 ], [ "consists", -11.306456565856934 ], [ "front", -11.306635856628418 ], [ "rati", -11.306652069091797 ], [ "▁Following", -11.30678653717041 ], [ "▁Medicine", -11.307161331176758 ], [ "▁pune", -11.30729866027832 ], [ "▁errors", -11.307354927062988 ], [ "arian", -11.307613372802734 ], [ "lib", -11.30811882019043 ], [ "SR", -11.308351516723633 ], [ "ML", -11.308568000793457 ], [ "▁Safety", -11.308823585510254 ], [ "▁clar", -11.309355735778809 ], [ "New", -11.309764862060547 ], [ "▁37", -11.309773445129395 ], [ "▁Administration", -11.309823036193848 ], [ "▁2.0", -11.310120582580566 ], [ "▁obviously", -11.310196876525879 ], [ "▁Mitarbeiter", -11.310254096984863 ], [ "▁improvements", -11.31043529510498 ], [ "▁Cut", -11.310630798339844 ], [ "▁Natural", -11.310672760009766 ], [ "▁arrival", -11.311182975769043 ], [ "▁pizza", -11.311339378356934 ], [ "eşti", -11.311570167541504 ], [ "cept", -11.311654090881348 ], [ "▁livre", -11.311686515808105 ], [ "▁nombreux", -11.312195777893066 ], [ "▁authentic", -11.312231063842773 ], [ "▁gemacht", -11.312472343444824 ], [ "▁broadcast", -11.312478065490723 ], [ "▁stronger", -11.312545776367188 ], [ "UP", -11.31257152557373 ], [ "▁centers", -11.312614440917969 ], [ "▁petite", -11.312617301940918 ], [ "▁spots", -11.312626838684082 ], [ "▁crystal", -11.312756538391113 ], [ "▁salon", -11.313044548034668 ], [ "▁gained", -11.313098907470703 ], [ "▁Mus", -11.313215255737305 ], [ "▁lens", -11.313223838806152 ], [ "▁ihm", -11.313231468200684 ], [ "minute", -11.313573837280273 ], [ "▁greatly", -11.313587188720703 ], [ "LP", -11.31361198425293 ], [ "rait", -11.314027786254883 ], [ "▁bid", -11.314154624938965 ], [ "▁cit", -11.314203262329102 ], [ "entreprise", -11.31435775756836 ], [ "▁55", -11.314533233642578 ], [ "▁respectively", -11.314536094665527 ], [ "▁lo", -11.314638137817383 ], [ "▁cons", -11.314743995666504 ], [ "▁Energie", -11.315169334411621 ], [ "▁OK", -11.31521224975586 ], [ "▁grill", -11.315338134765625 ], [ "▁heading", -11.31549072265625 ], [ "▁sollten", -11.315491676330566 ], [ "▁Fragen", -11.315528869628906 ], [ "▁Poli", -11.315556526184082 ], [ "▁studying", -11.315723419189453 ], [ "▁développement", -11.315882682800293 ], [ "▁foam", -11.316035270690918 ], [ "▁1996", -11.316511154174805 ], [ "▁disaster", -11.31662654876709 ], [ "▁cafe", -11.317262649536133 ], [ "▁moves", -11.317267417907715 ], [ "focuses", -11.317712783813477 ], [ "▁Avenue", -11.317834854125977 ], [ "▁humans", -11.31784439086914 ], [ "▁(3", -11.318021774291992 ], [ "▁région", -11.318347930908203 ], [ "▁DJ", -11.318608283996582 ], [ "shop", -11.318819046020508 ], [ "▁acting", -11.318843841552734 ], [ "▁Justice", -11.318967819213867 ], [ "▁trouve", -11.319010734558105 ], [ "▁Estate", -11.319040298461914 ], [ "▁strict", -11.319231986999512 ], [ "▁talks", -11.319283485412598 ], [ "▁mat", -11.319290161132812 ], [ "▁completion", -11.319327354431152 ], [ "delivering", -11.31943416595459 ], [ "CD", -11.31973934173584 ], [ "0%", -11.319960594177246 ], [ "▁creativity", -11.320253372192383 ], [ "BR", -11.320272445678711 ], [ "▁occurred", -11.320357322692871 ], [ "Car", -11.320590019226074 ], [ "▁rising", -11.320761680603027 ], [ "gger", -11.32086181640625 ], [ "▁Gene", -11.320901870727539 ], [ "▁workplace", 
-11.320914268493652 ], [ "phy", -11.321065902709961 ], [ "▁Bla", -11.32107162475586 ], [ "▁trailer", -11.32120418548584 ], [ "▁Forest", -11.321205139160156 ], [ "▁profession", -11.321246147155762 ], [ "▁Father", -11.32137680053711 ], [ "flu", -11.321487426757812 ], [ "tone", -11.321489334106445 ], [ "▁sexual", -11.321736335754395 ], [ "▁Map", -11.321805953979492 ], [ "OT", -11.3218412399292 ], [ "▁Us", -11.321878433227539 ], [ "tôt", -11.321892738342285 ], [ "▁Wert", -11.321901321411133 ], [ "preparing", -11.322121620178223 ], [ "isé", -11.322243690490723 ], [ "▁lake", -11.322461128234863 ], [ "eed", -11.32270336151123 ], [ "jun", -11.322888374328613 ], [ "▁implemented", -11.323014259338379 ], [ "vid", -11.323116302490234 ], [ "igne", -11.323201179504395 ], [ "▁follows", -11.323214530944824 ], [ "▁Eric", -11.323430061340332 ], [ "body", -11.323530197143555 ], [ "▁contained", -11.323585510253906 ], [ "▁massage", -11.323715209960938 ], [ "AV", -11.323725700378418 ], [ "▁insa", -11.323850631713867 ], [ "▁observed", -11.323892593383789 ], [ "▁marque", -11.324137687683105 ], [ "lines", -11.324451446533203 ], [ "▁Frage", -11.324482917785645 ], [ "largely", -11.324647903442383 ], [ "gegeben", -11.32473087310791 ], [ "▁colleagues", -11.324762344360352 ], [ "pha", -11.32494068145752 ], [ "▁representative", -11.325217247009277 ], [ "▁shut", -11.325650215148926 ], [ "▁secondary", -11.325779914855957 ], [ "▁exhibit", -11.325927734375 ], [ "1)", -11.325932502746582 ], [ "mid", -11.326109886169434 ], [ "▁Due", -11.326229095458984 ], [ "▁initiatives", -11.326457023620605 ], [ "▁occurs", -11.326458930969238 ], [ "lent", -11.326478958129883 ], [ "▁façon", -11.326778411865234 ], [ "▁iOS", -11.326803207397461 ], [ "▁exploring", -11.327000617980957 ], [ "▁stations", -11.327103614807129 ], [ "nton", -11.327234268188477 ], [ "▁Country", -11.32729721069336 ], [ "▁shouldn", -11.327406883239746 ], [ "▁casual", -11.327611923217773 ], [ "-18", -11.32769775390625 ], [ "▁maintained", -11.32772445678711 ], [ "▁cart", -11.327790260314941 ], [ "▁propre", -11.327836036682129 ], [ "▁asset", -11.327948570251465 ], [ "firm", -11.32803726196289 ], [ "gla", -11.328231811523438 ], [ "viv", -11.3282470703125 ], [ "▁scientists", -11.328873634338379 ], [ "▁Nor", -11.328936576843262 ], [ "ites", -11.329320907592773 ], [ "▁engaging", -11.329933166503906 ], [ "My", -11.330178260803223 ], [ "▁workshops", -11.330282211303711 ], [ "ffer", -11.3303804397583 ], [ "activité", -11.33047103881836 ], [ "▁tension", -11.330567359924316 ], [ "▁dual", -11.330668449401855 ], [ "uer", -11.33084774017334 ], [ "900", -11.330941200256348 ], [ "SF", -11.33108139038086 ], [ "▁kannst", -11.331146240234375 ], [ "▁bur", -11.33115291595459 ], [ "▁visitor", -11.331156730651855 ], [ "▁granted", -11.331178665161133 ], [ "▁union", -11.331355094909668 ], [ "▁tablet", -11.331461906433105 ], [ "▁Choose", -11.33146858215332 ], [ "ibil", -11.331551551818848 ], [ "▁settlement", -11.331830978393555 ], [ "genommen", -11.331892967224121 ], [ "▁marked", -11.332956314086914 ], [ "▁diagnostic", -11.333370208740234 ], [ "▁prayer", -11.333529472351074 ], [ "▁Toronto", -11.334035873413086 ], [ "trans", -11.334146499633789 ], [ "▁respectiv", -11.334160804748535 ], [ "▁2012.", -11.334207534790039 ], [ "icul", -11.334394454956055 ], [ "▁satisfied", -11.334527969360352 ], [ "▁Fla", -11.334596633911133 ], [ "▁estimate", -11.334638595581055 ], [ "▁Agency", -11.33466911315918 ], [ "OD", -11.334708213806152 ], [ "▁McC", -11.334746360778809 ], [ "bert", -11.334748268127441 ], [ 
"▁seal", -11.334771156311035 ], [ "aine", -11.334839820861816 ], [ "▁cauza", -11.334848403930664 ], [ "▁wallpaper", -11.335081100463867 ], [ "▁alb", -11.33536434173584 ], [ "▁Sound", -11.335681915283203 ], [ "worth", -11.33572769165039 ], [ "chten", -11.335858345031738 ], [ "programm", -11.335896492004395 ], [ "▁pounds", -11.336215019226074 ], [ "▁coaching", -11.336278915405273 ], [ "▁Furthermore", -11.336454391479492 ], [ "▁Korea", -11.336471557617188 ], [ "▁flour", -11.336530685424805 ], [ "▁sommes", -11.33657169342041 ], [ "▁Repair", -11.33661937713623 ], [ "”)", -11.336642265319824 ], [ "itch", -11.336675643920898 ], [ "blu", -11.336786270141602 ], [ "zar", -11.336882591247559 ], [ "▁diferite", -11.33745002746582 ], [ "▁Golf", -11.337685585021973 ], [ "arch", -11.33772087097168 ], [ "▁panels", -11.337799072265625 ], [ "jan", -11.337956428527832 ], [ "“.", -11.338240623474121 ], [ "izarea", -11.338324546813965 ], [ "▁golden", -11.33854866027832 ], [ "▁flying", -11.338550567626953 ], [ "▁museum", -11.338700294494629 ], [ "▁equivalent", -11.338759422302246 ], [ "▁Lang", -11.339032173156738 ], [ "schi", -11.339539527893066 ], [ "MI", -11.339595794677734 ], [ "▁faci", -11.339838027954102 ], [ "▁Rahmen", -11.339988708496094 ], [ "▁attending", -11.340130805969238 ], [ "′′", -11.340483665466309 ], [ "▁Tro", -11.341070175170898 ], [ "▁gaming", -11.341447830200195 ], [ "▁aujourd", -11.341479301452637 ], [ "▁Wochen", -11.341526985168457 ], [ "▁entering", -11.341535568237305 ], [ "its", -11.34155559539795 ], [ "▁Private", -11.341866493225098 ], [ "▁Ocean", -11.34188175201416 ], [ "▁01", -11.342098236083984 ], [ "▁coloring", -11.342188835144043 ], [ "ător", -11.34253215789795 ], [ "▁flooring", -11.342548370361328 ], [ "▁downtown", -11.34276294708252 ], [ "rab", -11.342998504638672 ], [ "HI", -11.343221664428711 ], [ "▁illness", -11.343234062194824 ], [ "▁whil", -11.343307495117188 ], [ "▁diamond", -11.34333324432373 ], [ "Mail", -11.343419075012207 ], [ "▁Dream", -11.34344482421875 ], [ "▁Golden", -11.344099044799805 ], [ "▁rein", -11.344220161437988 ], [ "▁hi", -11.344283103942871 ], [ "▁expressed", -11.344489097595215 ], [ "▁luat", -11.344511985778809 ], [ "▁Share", -11.34453010559082 ], [ "▁Programm", -11.344706535339355 ], [ "▁Sales", -11.344707489013672 ], [ "▁prof", -11.344890594482422 ], [ "▁MO", -11.34505844116211 ], [ "▁Short", -11.345088958740234 ], [ "▁charm", -11.345290184020996 ], [ "▁Cer", -11.345373153686523 ], [ "▁Run", -11.34553337097168 ], [ "▁tutorial", -11.345589637756348 ], [ "oul", -11.34561824798584 ], [ "▁Fest", -11.345794677734375 ], [ "▁uniform", -11.345929145812988 ], [ "aß", -11.346014976501465 ], [ "▁pipe", -11.346076965332031 ], [ "▁Square", -11.346283912658691 ], [ "▁Kosten", -11.346365928649902 ], [ "▁checked", -11.346590042114258 ], [ "▁65", -11.346626281738281 ], [ "▁Adam", -11.346686363220215 ], [ "cel", -11.346700668334961 ], [ "ello", -11.346965789794922 ], [ "▁Res", -11.347023963928223 ], [ "▁drain", -11.34708309173584 ], [ "ză", -11.347129821777344 ], [ "▁Tech", -11.34739875793457 ], [ "▁strive", -11.34749698638916 ], [ "cycl", -11.347506523132324 ], [ "▁stark", -11.347541809082031 ], [ "load", -11.34754753112793 ], [ "▁Stat", -11.347589492797852 ], [ "▁Rec", -11.347622871398926 ], [ "ians", -11.347716331481934 ], [ "▁Tin", -11.347738265991211 ], [ "▁Agreement", -11.347840309143066 ], [ "▁pret", -11.348027229309082 ], [ "-9", -11.348326683044434 ], [ "▁sentence", -11.348380088806152 ], [ "▁Direct", -11.348426818847656 ], [ "▁Rep", -11.348465919494629 ], [ 
"▁Prozent", -11.348799705505371 ], [ "▁invitation", -11.34882640838623 ], [ "▁refund", -11.349113464355469 ], [ "▁Kids", -11.349287986755371 ], [ "stock", -11.349383354187012 ], [ "TP", -11.349400520324707 ], [ "▁tau", -11.34941291809082 ], [ "from", -11.349421501159668 ], [ "▁Ash", -11.349451065063477 ], [ "store", -11.349535942077637 ], [ "▁Common", -11.34958553314209 ], [ "▁Qualität", -11.34968376159668 ], [ "▁strongly", -11.349727630615234 ], [ "▁importante", -11.34979248046875 ], [ "ome", -11.349912643432617 ], [ "▁surtout", -11.349946022033691 ], [ "enables", -11.35020637512207 ], [ "▁decent", -11.350221633911133 ], [ "▁neutral", -11.350237846374512 ], [ "▁produs", -11.350356101989746 ], [ "bury", -11.350451469421387 ], [ "▁Level", -11.350618362426758 ], [ "▁interes", -11.350699424743652 ], [ "mov", -11.350797653198242 ], [ "▁backup", -11.350939750671387 ], [ "même", -11.351094245910645 ], [ "doc", -11.351119041442871 ], [ "▁ -11.35130786895752 ], [ "▁specified", -11.351495742797852 ], [ "▁founder", -11.351655960083008 ], [ "And", -11.352090835571289 ], [ "isten", -11.352149963378906 ], [ "▁lecture", -11.352729797363281 ], [ "▁wake", -11.352895736694336 ], [ "▁vraiment", -11.352980613708496 ], [ "▁swing", -11.353188514709473 ], [ "▁addresses", -11.353275299072266 ], [ "▁Verfügung", -11.353504180908203 ], [ "▁deadline", -11.353761672973633 ], [ "н", -11.353791236877441 ], [ "▁Content", -11.353970527648926 ], [ "▁Gre", -11.354111671447754 ], [ "▁Experience", -11.354378700256348 ], [ "tura", -11.354458808898926 ], [ "▁exit", -11.354642868041992 ], [ "▁Britain", -11.354652404785156 ], [ "▁Sunt", -11.354684829711914 ], [ "▁documentation", -11.354690551757812 ], [ "▁showcase", -11.3547945022583 ], [ "▁photographs", -11.354822158813477 ], [ "qué", -11.35483169555664 ], [ "zin", -11.354909896850586 ], [ "pres", -11.354933738708496 ], [ "▁decline", -11.354955673217773 ], [ "▁Large", -11.355030059814453 ], [ "▁bills", -11.355141639709473 ], [ "▁entitled", -11.355222702026367 ], [ "▁passionate", -11.355393409729004 ], [ "▁workout", -11.355413436889648 ], [ "▁Again", -11.35560417175293 ], [ "▁Haut", -11.35582160949707 ], [ "▁guaranteed", -11.35599136352539 ], [ "▁vue", -11.35600471496582 ], [ "▁farmers", -11.356224060058594 ], [ "▁admission", -11.356500625610352 ], [ "▁manière", -11.357080459594727 ], [ "▁reverse", -11.357121467590332 ], [ "▁FL", -11.357142448425293 ], [ "▁terminal", -11.357206344604492 ], [ "GI", -11.35731029510498 ], [ "▁speakers", -11.35739803314209 ], [ "▁responses", -11.357398986816406 ], [ "▁Doch", -11.357457160949707 ], [ "▁2013,", -11.357717514038086 ], [ "▁phones", -11.357789993286133 ], [ "ential", -11.357851028442383 ], [ "▁operator", -11.357916831970215 ], [ "▁steam", -11.358036994934082 ], [ "burn", -11.358091354370117 ], [ "▁seul", -11.35815715789795 ], [ "▁unusual", -11.358322143554688 ], [ "▁educate", -11.358403205871582 ], [ "▁Que", -11.358680725097656 ], [ "▁believes", -11.359137535095215 ], [ "▁succeed", -11.359344482421875 ], [ "▁delay", -11.359533309936523 ], [ "▁deeper", -11.359633445739746 ], [ "▁reaching", -11.359890937805176 ], [ "▁objectives", -11.360086441040039 ], [ "▁temporary", -11.36028003692627 ], [ "▁artistic", -11.360421180725098 ], [ "▁sou", -11.360471725463867 ], [ "▁transparent", -11.36062240600586 ], [ "There", -11.360798835754395 ], [ "ception", -11.360836029052734 ], [ "▁excess", -11.360939979553223 ], [ "▁gathering", -11.361008644104004 ], [ "▁Save", -11.361095428466797 ], [ "ază", -11.361166000366211 ], [ "▁français", -11.361197471618652 
], [ "▁laid", -11.361210823059082 ], [ "▁modul", -11.361394882202148 ], [ "avoir", -11.361465454101562 ], [ "under", -11.362113952636719 ], [ "dding", -11.362226486206055 ], [ "▁falls", -11.362232208251953 ], [ "▁Möglichkeit", -11.362369537353516 ], [ "▁ceremony", -11.362370491027832 ], [ "rai", -11.36237621307373 ], [ "▁Bor", -11.362709045410156 ], [ "▁Below", -11.362750053405762 ], [ "4)", -11.362759590148926 ], [ "▁Field", -11.362833023071289 ], [ "wear", -11.362935066223145 ], [ "motion", -11.362948417663574 ], [ "print", -11.363311767578125 ], [ "game", -11.363360404968262 ], [ "▁Irish", -11.363458633422852 ], [ "▁Las", -11.363458633422852 ], [ "Among", -11.363570213317871 ], [ "atori", -11.363580703735352 ], [ "▁ajuns", -11.363837242126465 ], [ "▁alive", -11.363860130310059 ], [ "▁retour", -11.363900184631348 ], [ "▁smoke", -11.3640775680542 ], [ "▁math", -11.364285469055176 ], [ "▁Ye", -11.364337921142578 ], [ "▁Denn", -11.36436653137207 ], [ "▁1995", -11.364412307739258 ], [ "▁bani", -11.364644050598145 ], [ "raz", -11.364998817443848 ], [ "world", -11.365026473999023 ], [ "▁engines", -11.365140914916992 ], [ "nehmen", -11.365192413330078 ], [ "stor", -11.365328788757324 ], [ "▁interpret", -11.365403175354004 ], [ "▁Ven", -11.365489959716797 ], [ "▁cotton", -11.365622520446777 ], [ "▁represented", -11.366004943847656 ], [ "▁fabulous", -11.366166114807129 ], [ "▁gender", -11.366301536560059 ], [ "Mar", -11.366668701171875 ], [ "vic", -11.366991996765137 ], [ "▁newsletter", -11.367432594299316 ], [ "sburg", -11.367574691772461 ], [ "pond", -11.36838436126709 ], [ "▁Carl", -11.368454933166504 ], [ "▁bunch", -11.368714332580566 ], [ "▁tower", -11.368847846984863 ], [ "▁trigger", -11.368976593017578 ], [ "▁explanation", -11.369091033935547 ], [ "Man", -11.369114875793457 ], [ "iunea", -11.369168281555176 ], [ "▁announcement", -11.369492530822754 ], [ "▁seeds", -11.36952018737793 ], [ "▁shell", -11.369865417480469 ], [ "▁Working", -11.36989688873291 ], [ "viz", -11.370267868041992 ], [ "▁Simply", -11.370329856872559 ], [ "sub", -11.37037181854248 ], [ "▁Village", -11.37060832977295 ], [ "▁falling", -11.370742797851562 ], [ "▁fits", -11.37084674835205 ], [ "▁wichtig", -11.37088394165039 ], [ "▁Down", -11.37108039855957 ], [ "bble", -11.371573448181152 ], [ "▁Orange", -11.37165641784668 ], [ "promoting", -11.371932029724121 ], [ "▁rapidly", -11.37217903137207 ], [ "▁translation", -11.372330665588379 ], [ "nig", -11.3723726272583 ], [ "fusion", -11.37240982055664 ], [ "kosten", -11.372611045837402 ], [ "2)", -11.372783660888672 ], [ "▁Express", -11.372958183288574 ], [ "▁Sw", -11.373003959655762 ], [ "▁frequency", -11.373086929321289 ], [ "▁diversity", -11.373348236083984 ], [ "MT", -11.373452186584473 ], [ "▁bekannt", -11.373530387878418 ], [ "lion", -11.373871803283691 ], [ "▁cop", -11.37393856048584 ], [ "▁Customer", -11.374072074890137 ], [ "▁demands", -11.374427795410156 ], [ "▁corn", -11.374516487121582 ], [ "▁Hamburg", -11.374551773071289 ], [ "SD", -11.374628067016602 ], [ "▁Rome", -11.374677658081055 ], [ "▁Pur", -11.374750137329102 ], [ "▁stamp", -11.374885559082031 ], [ "▁grateful", -11.374967575073242 ], [ "RM", -11.37511157989502 ], [ "▁Pl", -11.37511920928955 ], [ "▁Tele", -11.375154495239258 ], [ "▁plugin", -11.375492095947266 ], [ "▁maxim", -11.375675201416016 ], [ "▁Hoch", -11.37574577331543 ], [ "igung", -11.375823020935059 ], [ "▁Entwicklung", -11.375858306884766 ], [ "▁File", -11.375931739807129 ], [ "▁Eastern", -11.376070022583008 ], [ "▁scrap", -11.376331329345703 ], [ 
"▁acquired", -11.376338958740234 ], [ "sau", -11.376364707946777 ], [ "▁Klein", -11.376452445983887 ], [ "▁milioane", -11.376492500305176 ], [ "▁Stand", -11.376693725585938 ], [ "▁childhood", -11.37671184539795 ], [ "▁artificial", -11.376752853393555 ], [ "▁substantial", -11.376851081848145 ], [ "druck", -11.377315521240234 ], [ "▁Kra", -11.377562522888184 ], [ "▁performances", -11.377645492553711 ], [ "▁row", -11.377824783325195 ], [ "NT", -11.377899169921875 ], [ "mod", -11.377904891967773 ], [ "remained", -11.378399848937988 ], [ "▁nimic", -11.378462791442871 ], [ "▁Limited", -11.378555297851562 ], [ "▁cookie", -11.378718376159668 ], [ "▁retain", -11.378816604614258 ], [ "▁600", -11.379144668579102 ], [ "▁eigene", -11.379158020019531 ], [ "▁tune", -11.379209518432617 ], [ "NS", -11.379256248474121 ], [ "▁dad", -11.379284858703613 ], [ "Moreover", -11.379415512084961 ], [ "ès", -11.379434585571289 ], [ "▁worship", -11.379439353942871 ], [ "▁Material", -11.3794584274292 ], [ "▁verb", -11.379528045654297 ], [ "ziehen", -11.37957763671875 ], [ "lton", -11.379645347595215 ], [ "▁boot", -11.379982948303223 ], [ "plo", -11.380118370056152 ], [ "CF", -11.380212783813477 ], [ "GM", -11.380215644836426 ], [ "▁Mix", -11.38046932220459 ], [ "▁Front", -11.380474090576172 ], [ "▁repairs", -11.380655288696289 ], [ "▁proportion", -11.381068229675293 ], [ "▁habit", -11.381132125854492 ], [ "▁hide", -11.38156509399414 ], [ "focusing", -11.381707191467285 ], [ "▁Annual", -11.381717681884766 ], [ "▁twin", -11.3817777633667 ], [ "▁acord", -11.381780624389648 ], [ "ehr", -11.381814956665039 ], [ "month", -11.382303237915039 ], [ "venir", -11.382535934448242 ], [ "Or", -11.38254165649414 ], [ "awa", -11.382600784301758 ], [ "lass", -11.382735252380371 ], [ "ffe", -11.383048057556152 ], [ "iți", -11.383074760437012 ], [ "NO", -11.3831148147583 ], [ "▁scope", -11.383295059204102 ], [ "▁lowest", -11.383527755737305 ], [ "▁afraid", -11.383572578430176 ], [ "▁subjects", -11.383578300476074 ], [ "▁templates", -11.383586883544922 ], [ "▁jos", -11.383604049682617 ], [ "DM", -11.383687973022461 ], [ "ensemble", -11.383792877197266 ], [ "▁Ski", -11.383941650390625 ], [ "DP", -11.384099960327148 ], [ "▁grip", -11.384171485900879 ], [ "2-", -11.38436222076416 ], [ "▁sécurité", -11.384743690490723 ], [ "▁mono", -11.384749412536621 ], [ "▁controls", -11.384854316711426 ], [ "SV", -11.384879112243652 ], [ "install", -11.384970664978027 ], [ "berry", -11.385042190551758 ], [ "nial", -11.385120391845703 ], [ "shed", -11.385462760925293 ], [ "▁celle", -11.385830879211426 ], [ "FR", -11.385936737060547 ], [ "äng", -11.385950088500977 ], [ "▁gaz", -11.385984420776367 ], [ "êt", -11.386184692382812 ], [ "▁viewing", -11.386412620544434 ], [ "▁asigura", -11.386524200439453 ], [ "bling", -11.3865327835083 ], [ "master", -11.386919975280762 ], [ "▁Fin", -11.387160301208496 ], [ "VC", -11.387365341186523 ], [ "▁patent", -11.387715339660645 ], [ "▁Clean", -11.38773250579834 ], [ "▁1970", -11.387789726257324 ], [ "▁Char", -11.387971878051758 ], [ "thi", -11.388010025024414 ], [ "bli", -11.388141632080078 ], [ "▁haut", -11.388307571411133 ], [ "tica", -11.38836669921875 ], [ "▁venit", -11.388578414916992 ], [ "▁compatible", -11.388678550720215 ], [ "▁hanging", -11.388690948486328 ], [ "UN", -11.388842582702637 ], [ "▁forth", -11.388911247253418 ], [ "▁painted", -11.388912200927734 ], [ "lip", -11.389031410217285 ], [ "▁deeply", -11.389089584350586 ], [ "▁participating", -11.389242172241211 ], [ "▁Iran", -11.38968276977539 ], [ 
"▁conventional", -11.389769554138184 ], [ "ARE", -11.38985824584961 ], [ "▁accuracy", -11.389896392822266 ], [ "▁Familie", -11.389955520629883 ], [ "▁Dir", -11.39001178741455 ], [ "▁gehen", -11.390127182006836 ], [ "▁moderne", -11.39022159576416 ], [ "▁Iraq", -11.39050579071045 ], [ "▁vente", -11.390582084655762 ], [ "▁Donald", -11.390998840332031 ], [ "▁passer", -11.391051292419434 ], [ "▁mehrere", -11.391267776489258 ], [ "▁Everything", -11.391291618347168 ], [ "▁studied", -11.391307830810547 ], [ "▁acquire", -11.391312599182129 ], [ "für", -11.391477584838867 ], [ "▁gal", -11.391502380371094 ], [ "▁headed", -11.391809463500977 ], [ "▁screening", -11.391865730285645 ], [ "▁findings", -11.392303466796875 ], [ "▁nutrition", -11.392305374145508 ], [ "▁Secretary", -11.392308235168457 ], [ "duct", -11.392431259155273 ], [ "born", -11.392436027526855 ], [ "«", -11.39261531829834 ], [ "▁statistics", -11.392616271972656 ], [ "▁Sydney", -11.392800331115723 ], [ "▁Prof", -11.392829895019531 ], [ "▁dialogue", -11.39327621459961 ], [ "▁gather", -11.393425941467285 ], [ "valu", -11.393746376037598 ], [ "▁currency", -11.394073486328125 ], [ "▁Kat", -11.394092559814453 ], [ "gotten", -11.394189834594727 ], [ "main", -11.39432144165039 ], [ "▁coin", -11.394340515136719 ], [ "▁Nick", -11.394380569458008 ], [ "vă", -11.394658088684082 ], [ "▁Victoria", -11.394832611083984 ], [ "▁conclusion", -11.3949613571167 ], [ "▁lemon", -11.394998550415039 ], [ "▁Article", -11.39516830444336 ], [ "▁necesar", -11.39516830444336 ], [ "mag", -11.395180702209473 ], [ "▁riding", -11.39537239074707 ], [ "▁Eli", -11.395599365234375 ], [ "▁cord", -11.395635604858398 ], [ "wä", -11.39572811126709 ], [ "ußerdem", -11.395737648010254 ], [ "▁Bed", -11.395759582519531 ], [ "▁layers", -11.395833015441895 ], [ "▁harder", -11.395975112915039 ], [ "▁processor", -11.396040916442871 ], [ "▁Ils", -11.39613151550293 ], [ "▁Edition", -11.39615535736084 ], [ "▁Link", -11.396393775939941 ], [ "éré", -11.396461486816406 ], [ "▁nume", -11.396576881408691 ], [ "▁Boy", -11.39659595489502 ], [ "▁equally", -11.396646499633789 ], [ "▁Regel", -11.397119522094727 ], [ "▁hopes", -11.397185325622559 ], [ "odor", -11.397311210632324 ], [ "▁initially", -11.397430419921875 ], [ "▁$4", -11.3974609375 ], [ "▁exemplu", -11.397537231445312 ], [ "▁vari", -11.397565841674805 ], [ "schl", -11.397698402404785 ], [ "▁southern", -11.39809799194336 ], [ "▁mein", -11.39818000793457 ], [ "▁1994", -11.398300170898438 ], [ "▁importantly", -11.398401260375977 ], [ "▁succes", -11.398526191711426 ], [ "▁developer", -11.398598670959473 ], [ "▁lips", -11.39889144897461 ], [ "▁attitude", -11.39900016784668 ], [ "▁Age", -11.399541854858398 ], [ "▁corps", -11.399713516235352 ], [ "▁clicking", -11.39976978302002 ], [ "▁putem", -11.399832725524902 ], [ "▁journée", -11.40003776550293 ], [ "boy", -11.4002103805542 ], [ "▁injured", -11.40028190612793 ], [ "▁watched", -11.400433540344238 ], [ "▁flights", -11.40079116821289 ], [ "turn", -11.400980949401855 ], [ "▁stainless", -11.401562690734863 ], [ "▁besondere", -11.40156364440918 ], [ "▁Tur", -11.401596069335938 ], [ "▁hiring", -11.401650428771973 ], [ "▁roads", -11.401727676391602 ], [ "ificat", -11.401785850524902 ], [ "▁Flor", -11.402045249938965 ], [ "▁puternic", -11.402215003967285 ], [ "▁unexpected", -11.40223503112793 ], [ "▁Est", -11.40238094329834 ], [ "▁adopted", -11.40253734588623 ], [ "▁Fox", -11.402647972106934 ], [ "▁contributions", -11.402870178222656 ], [ "sec", -11.402968406677246 ], [ "IO", -11.403059959411621 ], [ 
"▁santé", -11.403432846069336 ], [ "▁Tree", -11.403763771057129 ], [ "▁scurt", -11.40381908416748 ], [ "▁Products", -11.403848648071289 ], [ "▁forecast", -11.403998374938965 ], [ "▁actor", -11.404143333435059 ], [ "▁Gallery", -11.404149055480957 ], [ "▁continuous", -11.404163360595703 ], [ "▁Hat", -11.404291152954102 ], [ "▁slip", -11.404501914978027 ], [ "9%", -11.404960632324219 ], [ "▁depression", -11.405043601989746 ], [ "UI", -11.405229568481445 ], [ "abile", -11.405648231506348 ], [ "▁merit", -11.405671119689941 ], [ "▁Fer", -11.405805587768555 ], [ "▁robot", -11.405888557434082 ], [ "▁gel", -11.40589427947998 ], [ "▁gentle", -11.406017303466797 ], [ "▁wanting", -11.406071662902832 ], [ "▁understood", -11.406157493591309 ], [ "▁terrain", -11.406161308288574 ], [ "▁associate", -11.406176567077637 ], [ "▁discussions", -11.40632152557373 ], [ "▁Job", -11.406365394592285 ], [ "spec", -11.406440734863281 ], [ "Dabei", -11.406475067138672 ], [ "etic", -11.406517028808594 ], [ "gol", -11.40654468536377 ], [ "▁20%", -11.406584739685059 ], [ "▁grup", -11.406606674194336 ], [ "▁Doctor", -11.406813621520996 ], [ "verse", -11.407246589660645 ], [ "▁victim", -11.407258033752441 ], [ "ță", -11.407302856445312 ], [ "▁scores", -11.407544136047363 ], [ "▁Policy", -11.407634735107422 ], [ "▁Anna", -11.407736778259277 ], [ "IV", -11.407804489135742 ], [ "▁mineral", -11.408202171325684 ], [ "live", -11.40821647644043 ], [ "▁grey", -11.408368110656738 ], [ "struct", -11.40852165222168 ], [ "▁emails", -11.408738136291504 ], [ "▁anymore", -11.409114837646484 ], [ "▁productivity", -11.409387588500977 ], [ "▁Dark", -11.409463882446289 ], [ "▁neither", -11.409481048583984 ], [ "▁quotes", -11.409611701965332 ], [ "LS", -11.410368919372559 ], [ "▁Arizona", -11.41040325164795 ], [ "night", -11.410497665405273 ], [ "élé", -11.411019325256348 ], [ "▁assigned", -11.411153793334961 ], [ "▁satellite", -11.411328315734863 ], [ "▁stability", -11.411665916442871 ], [ "▁networking", -11.41172981262207 ], [ "▁Transport", -11.411847114562988 ], [ "▁persons", -11.411856651306152 ], [ "fund", -11.412043571472168 ], [ "▁pratique", -11.41213321685791 ], [ "▁inca", -11.412134170532227 ], [ "iller", -11.412349700927734 ], [ "▁packed", -11.41239070892334 ], [ "▁Vegas", -11.412484169006348 ], [ "▁offre", -11.412493705749512 ], [ "▁Bin", -11.412518501281738 ], [ "stop", -11.412609100341797 ], [ "mini", -11.412860870361328 ], [ "▁jam", -11.412877082824707 ], [ "cord", -11.41289234161377 ], [ "▁Beautiful", -11.412996292114258 ], [ "▁trash", -11.413012504577637 ], [ "▁wise", -11.413092613220215 ], [ "▁accounting", -11.413178443908691 ], [ "▁différents", -11.413182258605957 ], [ "▁stil", -11.413214683532715 ], [ "suit", -11.413951873779297 ], [ "▁vier", -11.414209365844727 ], [ "▁permis", -11.414224624633789 ], [ "flow", -11.414238929748535 ], [ "▁col", -11.414749145507812 ], [ "ected", -11.414960861206055 ], [ "▁singer", -11.414999008178711 ], [ "▁GmbH", -11.415038108825684 ], [ "tics", -11.415094375610352 ], [ "▁ser", -11.415159225463867 ], [ "On", -11.415315628051758 ], [ "▁insights", -11.415605545043945 ], [ "BB", -11.415946960449219 ], [ "▁differ", -11.415959358215332 ], [ "▁Glass", -11.416131973266602 ], [ "▁Six", -11.416482925415039 ], [ "▁subscription", -11.416584968566895 ], [ "BC", -11.416606903076172 ], [ "▁returning", -11.416664123535156 ], [ "kleinen", -11.416693687438965 ], [ "▁advantages", -11.416747093200684 ], [ "omme", -11.416852951049805 ], [ "lus", -11.417071342468262 ], [ "now", -11.417141914367676 ], [ "▁Pack", 
-11.417253494262695 ], [ "▁leak", -11.417333602905273 ], [ "▁muscles", -11.41748332977295 ], [ "▁davon", -11.417492866516113 ], [ "mph", -11.417858123779297 ], [ "▁temple", -11.417868614196777 ], [ "▁Après", -11.417901039123535 ], [ "▁Illinois", -11.41801643371582 ], [ "▁variable", -11.418065071105957 ], [ "▁judgment", -11.418389320373535 ], [ "gran", -11.41861629486084 ], [ "▁pose", -11.418621063232422 ], [ "das", -11.418647766113281 ], [ "ures", -11.418673515319824 ], [ "▁Championship", -11.418689727783203 ], [ "ebenfalls", -11.41872501373291 ], [ "▁hydro", -11.418753623962402 ], [ "▁angle", -11.419268608093262 ], [ "▁5-", -11.41940975189209 ], [ "▁gest", -11.419547080993652 ], [ "▁Frau", -11.420233726501465 ], [ "▁knock", -11.420275688171387 ], [ "FS", -11.420442581176758 ], [ "spi", -11.420577049255371 ], [ "▁Regional", -11.420717239379883 ], [ "lets", -11.421098709106445 ], [ "▁Date", -11.42115592956543 ], [ "▁Finance", -11.421211242675781 ], [ "▁Dann", -11.421320915222168 ], [ "Star", -11.421380043029785 ], [ "▁Creek", -11.421393394470215 ], [ "▁fu", -11.421648979187012 ], [ "wohn", -11.422141075134277 ], [ "▁anniversary", -11.422219276428223 ], [ "▁investments", -11.422292709350586 ], [ "▁universal", -11.422601699829102 ], [ "▁pit", -11.422745704650879 ], [ "ște", -11.422784805297852 ], [ "▁lab", -11.422822952270508 ], [ "dienst", -11.422884941101074 ], [ "▁pal", -11.422889709472656 ], [ "▁graphic", -11.42289924621582 ], [ "▁bearing", -11.422900199890137 ], [ "▁stylish", -11.423087120056152 ], [ "▁mé", -11.42319393157959 ], [ "▁există", -11.42326545715332 ], [ "▁découvrir", -11.423477172851562 ], [ "comp", -11.423606872558594 ], [ "ridge", -11.423667907714844 ], [ "▁heads", -11.423765182495117 ], [ "▁consequences", -11.423835754394531 ], [ "self", -11.423842430114746 ], [ "fried", -11.423870086669922 ], [ "▁inventory", -11.424199104309082 ], [ "▁strip", -11.42422866821289 ], [ "▁Civil", -11.42424488067627 ], [ "bell", -11.424307823181152 ], [ "▁neben", -11.424444198608398 ], [ "▁Perfect", -11.424470901489258 ], [ "▁Notre", -11.424478530883789 ], [ "▁fraud", -11.424630165100098 ], [ "▁employers", -11.424656867980957 ], [ "▁Jackson", -11.42470645904541 ], [ "▁probleme", -11.424915313720703 ], [ "▁richtig", -11.424957275390625 ], [ "▁Method", -11.425009727478027 ], [ "▁tired", -11.425010681152344 ], [ "dies", -11.425031661987305 ], [ "▁Number", -11.425315856933594 ], [ "rland", -11.425652503967285 ], [ "▁latter", -11.426031112670898 ], [ "rendre", -11.426064491271973 ], [ "▁cameras", -11.426095962524414 ], [ "▁euch", -11.426630020141602 ], [ "▁Description", -11.427038192749023 ], [ "Spec", -11.427061080932617 ], [ "▁mile", -11.427437782287598 ], [ "▁Challenge", -11.427474021911621 ], [ "▁Solutions", -11.427504539489746 ], [ "▁trusted", -11.427509307861328 ], [ "▁einge", -11.427515029907227 ], [ "rück", -11.427528381347656 ], [ "▁Ober", -11.427635192871094 ], [ "kes", -11.42764949798584 ], [ "▁Log", -11.427684783935547 ], [ "▁dessert", -11.427776336669922 ], [ "▁murder", -11.428033828735352 ], [ "▁1/2", -11.428311347961426 ], [ "▁Provide", -11.42872142791748 ], [ "nivelul", -11.428800582885742 ], [ "nici", -11.428818702697754 ], [ "▁observe", -11.42889404296875 ], [ "▁prescription", -11.429162979125977 ], [ "▁Sau", -11.429170608520508 ], [ "▁genuine", -11.42919635772705 ], [ "▁operated", -11.429231643676758 ], [ "▁generous", -11.429267883300781 ], [ "▁weapons", -11.429458618164062 ], [ "▁belief", -11.4295015335083 ], [ "▁consum", -11.429584503173828 ], [ "▁unknown", -11.430116653442383 
], [ "deoarece", -11.430135726928711 ], [ "Art", -11.430147171020508 ], [ "▁kurz", -11.430183410644531 ], [ "▁Gut", -11.430258750915527 ], [ "▁medication", -11.430522918701172 ], [ "▁Mau", -11.43058967590332 ], [ "▁divorce", -11.430678367614746 ], [ "▁claimed", -11.430811882019043 ], [ "halten", -11.430848121643066 ], [ "▁Cons", -11.43089485168457 ], [ "▁operational", -11.430975914001465 ], [ "▁Hong", -11.431081771850586 ], [ "VI", -11.431143760681152 ], [ "▁Blick", -11.431485176086426 ], [ "▁lamp", -11.431706428527832 ], [ "pati", -11.431853294372559 ], [ "▁4-", -11.43192195892334 ], [ "▁interven", -11.431964874267578 ], [ "ques", -11.43201732635498 ], [ "▁Talk", -11.432096481323242 ], [ "▁zeigt", -11.432318687438965 ], [ "▁targeted", -11.432390213012695 ], [ "round", -11.432640075683594 ], [ "enfant", -11.432748794555664 ], [ "▁Reg", -11.432836532592773 ], [ "▁instruments", -11.432872772216797 ], [ "▁calcul", -11.433363914489746 ], [ "▁Henry", -11.4335298538208 ], [ "▁Cla", -11.433616638183594 ], [ "▁rack", -11.433661460876465 ], [ "sehen", -11.43375301361084 ], [ "▁ending", -11.433754920959473 ], [ "▁resolve", -11.434130668640137 ], [ "▁advise", -11.434178352355957 ], [ "▁sociale", -11.434386253356934 ], [ "▁cabin", -11.434536933898926 ], [ "▁involve", -11.43480396270752 ], [ "gă", -11.434889793395996 ], [ "▁automat", -11.435132026672363 ], [ "▁consultant", -11.435258865356445 ], [ "Bu", -11.435370445251465 ], [ "▁safely", -11.435466766357422 ], [ "état", -11.435478210449219 ], [ "▁pros", -11.435657501220703 ], [ "▁lies", -11.435659408569336 ], [ "▁Brian", -11.435914993286133 ], [ "▁talented", -11.435954093933105 ], [ "pus", -11.43599796295166 ], [ "▁hub", -11.436060905456543 ], [ "▁Ji", -11.436066627502441 ], [ "▁sought", -11.436102867126465 ], [ "▁energie", -11.436210632324219 ], [ "▁möchten", -11.43634033203125 ], [ "▁11.", -11.436558723449707 ], [ "▁Kong", -11.436662673950195 ], [ "▁grave", -11.43666934967041 ], [ "▁lists", -11.436800956726074 ], [ "tati", -11.436809539794922 ], [ "verschiedenen", -11.43692398071289 ], [ "dam", -11.437061309814453 ], [ "▁charity", -11.437249183654785 ], [ "▁breaking", -11.43735122680664 ], [ "kins", -11.43747329711914 ], [ "▁könnte", -11.437517166137695 ], [ "▁appointed", -11.437532424926758 ], [ "roc", -11.4376859664917 ], [ "▁Senate", -11.437979698181152 ], [ "wit", -11.438002586364746 ], [ "▁emerging", -11.438162803649902 ], [ "▁année", -11.438288688659668 ], [ "▁Cool", -11.438365936279297 ], [ "▁sensor", -11.43842887878418 ], [ "How", -11.438488960266113 ], [ "▁Ryan", -11.438626289367676 ], [ "▁computers", -11.43871784210205 ], [ "▁fault", -11.4388427734375 ], [ "▁présent", -11.438843727111816 ], [ "ulation", -11.439149856567383 ], [ "▁stir", -11.439348220825195 ], [ "lauf", -11.439703941345215 ], [ "▁AI", -11.440389633178711 ], [ "▁Bri", -11.440438270568848 ], [ "▁bain", -11.441011428833008 ], [ "▁5,", -11.441287994384766 ], [ "schein", -11.44157886505127 ], [ "▁weiß", -11.441596031188965 ], [ "▁possibilities", -11.44235610961914 ], [ "gur", -11.442413330078125 ], [ "▁hinter", -11.442647933959961 ], [ "Innen", -11.442755699157715 ], [ "▁vorba", -11.442992210388184 ], [ "fahren", -11.443008422851562 ], [ "▁Cell", -11.443072319030762 ], [ "univers", -11.443137168884277 ], [ "▁Follow", -11.443424224853516 ], [ "▁emotions", -11.44360637664795 ], [ "▁Ministry", -11.443694114685059 ], [ "▁curriculum", -11.443694114685059 ], [ "Je", -11.443764686584473 ], [ "▁gab", -11.444080352783203 ], [ "▁sigur", -11.444270133972168 ], [ "rise", -11.444416999816895 
], [ "Pri", -11.44466495513916 ], [ "▁stabil", -11.444781303405762 ], [ "▁superb", -11.445100784301758 ], [ "▁Oak", -11.44510269165039 ], [ "▁rubber", -11.445286750793457 ], [ "▁tag", -11.445306777954102 ], [ "PG", -11.445361137390137 ], [ "▁Heat", -11.445477485656738 ], [ "▁thousand", -11.445504188537598 ], [ "▁meets", -11.445521354675293 ], [ "▁faced", -11.445578575134277 ], [ "▁reserve", -11.445640563964844 ], [ "cateva", -11.445767402648926 ], [ "▁gym", -11.445771217346191 ], [ "▁vitamin", -11.445960998535156 ], [ "▁Rest", -11.446457862854004 ], [ "▁Single", -11.446535110473633 ], [ "▁Stephen", -11.446623802185059 ], [ "▁trick", -11.446824073791504 ], [ "DU", -11.44694709777832 ], [ "▁telefon", -11.44711685180664 ], [ "▁gând", -11.447120666503906 ], [ "▁primit", -11.447345733642578 ], [ "▁Connect", -11.447351455688477 ], [ "▁führt", -11.447440147399902 ], [ "▁Info", -11.447500228881836 ], [ "▁recall", -11.447848320007324 ], [ "▁restore", -11.447885513305664 ], [ "lege", -11.44792652130127 ], [ "▁franchise", -11.448189735412598 ], [ "▁seulement", -11.44856071472168 ], [ "reci", -11.448598861694336 ], [ "▁2019,", -11.44864273071289 ], [ "▁Ring", -11.448663711547852 ], [ "▁assembly", -11.448678970336914 ], [ "intérieur", -11.448775291442871 ], [ "▁shade", -11.44887924194336 ], [ "▁meaningful", -11.448881149291992 ], [ "bag", -11.448989868164062 ], [ "ONE", -11.449249267578125 ], [ "▁globe", -11.449287414550781 ], [ "▁WA", -11.449406623840332 ], [ "▁intervention", -11.449495315551758 ], [ "öl", -11.449531555175781 ], [ "▁Marine", -11.45029067993164 ], [ "▁Angebot", -11.450512886047363 ], [ "▁align", -11.450618743896484 ], [ "▁temperatures", -11.450634956359863 ], [ "ifier", -11.45091724395752 ], [ "▁Nigeria", -11.451189041137695 ], [ "▁survive", -11.451216697692871 ], [ "ounce", -11.451275825500488 ], [ "▁placement", -11.451416969299316 ], [ "▁deci", -11.451528549194336 ], [ "▁Taylor", -11.451759338378906 ], [ "step", -11.45190715789795 ], [ "▁Geschichte", -11.452054023742676 ], [ "▁Bet", -11.452169418334961 ], [ "▁Nature", -11.45224380493164 ], [ "▁FC", -11.452256202697754 ], [ "▁ownership", -11.452286720275879 ], [ "▁behaviour", -11.452474594116211 ], [ "▁deutlich", -11.452532768249512 ], [ "▁wondering", -11.452798843383789 ], [ "▁cleaner", -11.453295707702637 ], [ "uring", -11.4534912109375 ], [ "rä", -11.453496932983398 ], [ "▁ga", -11.454296112060547 ], [ "ador", -11.454482078552246 ], [ "▁artwork", -11.454564094543457 ], [ "ologic", -11.45457649230957 ], [ "▁eigentlich", -11.454848289489746 ], [ "▁hell", -11.45522403717041 ], [ "source", -11.455251693725586 ], [ "▁gem", -11.455265045166016 ], [ "▁boss", -11.455307006835938 ], [ "▁arise", -11.455460548400879 ], [ "about", -11.455711364746094 ], [ "▁SI", -11.455951690673828 ], [ "▁ME", -11.45610237121582 ], [ "akt", -11.456191062927246 ], [ "▁Style", -11.456259727478027 ], [ "▁Körper", -11.456493377685547 ], [ "gui", -11.456799507141113 ], [ "▁navigate", -11.456819534301758 ], [ "▁Meanwhile", -11.456977844238281 ], [ "▁așa", -11.457111358642578 ], [ "▁bulk", -11.457298278808594 ], [ "▁directions", -11.457310676574707 ], [ "▁brick", -11.457747459411621 ], [ "▁Poly", -11.457752227783203 ], [ "▁politique", -11.457772254943848 ], [ "▁patch", -11.457777976989746 ], [ "ра", -11.457816123962402 ], [ "commerce", -11.457844734191895 ], [ "▁înainte", -11.457884788513184 ], [ "▁intelligent", -11.45823860168457 ], [ "▁infection", -11.458426475524902 ], [ "▁Tru", -11.458494186401367 ], [ "▁raising", -11.458504676818848 ], [ "tragen", 
-11.458539009094238 ], [ "▁portrait", -11.45858383178711 ], [ "▁meisten", -11.458783149719238 ], [ "▁organize", -11.45893669128418 ], [ "metric", -11.458962440490723 ], [ "▁Season", -11.459036827087402 ], [ "▁enforcement", -11.459259033203125 ], [ "origine", -11.459836959838867 ], [ "▁Ros", -11.460065841674805 ], [ "▁Mount", -11.460083961486816 ], [ "have", -11.460237503051758 ], [ "▁romantic", -11.460258483886719 ], [ "▁comic", -11.460810661315918 ], [ "▁greu", -11.461116790771484 ], [ "ET", -11.46133041381836 ], [ "▁hook", -11.461407661437988 ], [ "▁mort", -11.461411476135254 ], [ "▁indicated", -11.461583137512207 ], [ "▁7,", -11.461982727050781 ], [ "▁Neben", -11.46204662322998 ], [ "yer", -11.46214485168457 ], [ "▁momentul", -11.46214771270752 ], [ "note", -11.462313652038574 ], [ "▁baz", -11.46231460571289 ], [ "▁abroad", -11.462320327758789 ], [ "nite", -11.462464332580566 ], [ "▁bass", -11.462701797485352 ], [ "▁norm", -11.462714195251465 ], [ "▁É", -11.462788581848145 ], [ "4.", -11.462881088256836 ], [ "▁province", -11.463004112243652 ], [ "▁merge", -11.463419914245605 ], [ "arbeiten", -11.463438987731934 ], [ "-20", -11.463574409484863 ], [ "▁Nicht", -11.463674545288086 ], [ "spo", -11.463783264160156 ], [ "size", -11.463815689086914 ], [ "▁assure", -11.463849067687988 ], [ "charge", -11.463987350463867 ], [ "▁olive", -11.464017868041992 ], [ "▁Pot", -11.46408462524414 ], [ "▁Figure", -11.4642333984375 ], [ "clair", -11.464336395263672 ], [ "▁discipline", -11.464600563049316 ], [ "elli", -11.464639663696289 ], [ "▁tackle", -11.465169906616211 ], [ "▁buyer", -11.465237617492676 ], [ "▁loud", -11.465479850769043 ], [ "▁180", -11.465534210205078 ], [ "▁căt", -11.465587615966797 ], [ "▁Palm", -11.465738296508789 ], [ "away", -11.46593189239502 ], [ "▁Mother", -11.46607494354248 ], [ "onia", -11.466240882873535 ], [ "▁Protection", -11.466416358947754 ], [ "auto", -11.466547966003418 ], [ "▁Version", -11.466583251953125 ], [ "▁Nice", -11.466714859008789 ], [ "▁12.", -11.46682071685791 ], [ "▁0,", -11.466835021972656 ], [ "ATION", -11.466911315917969 ], [ "▁Produkte", -11.466955184936523 ], [ "▁tube", -11.467084884643555 ], [ "▁Houston", -11.467106819152832 ], [ "chu", -11.467500686645508 ], [ "pas", -11.467717170715332 ], [ "▁Ele", -11.467801094055176 ], [ "▁mountains", -11.467835426330566 ], [ "PH", -11.467937469482422 ], [ "▁languages", -11.468672752380371 ], [ "▁servicii", -11.468722343444824 ], [ "▁Stay", -11.468999862670898 ], [ "fil", -11.469138145446777 ], [ "▁propos", -11.469801902770996 ], [ "▁coll", -11.469825744628906 ], [ "▁mor", -11.470197677612305 ], [ "▁arrange", -11.470410346984863 ], [ "▁sorry", -11.470475196838379 ], [ "▁instruction", -11.470723152160645 ], [ "▁holes", -11.47077465057373 ], [ "letting", -11.471046447753906 ], [ "▁wa", -11.471074104309082 ], [ "▁Feb", -11.471227645874023 ], [ "omb", -11.471232414245605 ], [ "▁prise", -11.471290588378906 ], [ "VO", -11.471305847167969 ], [ "week", -11.471349716186523 ], [ "▁Event", -11.471427917480469 ], [ "▁AT", -11.471485137939453 ], [ "ket", -11.471492767333984 ], [ "haft", -11.471579551696777 ], [ "▁hits", -11.47159194946289 ], [ "foli", -11.471681594848633 ], [ "this", -11.471948623657227 ], [ "GP", -11.471970558166504 ], [ "▁Pin", -11.472332954406738 ], [ "▁Stein", -11.472503662109375 ], [ "thing", -11.472512245178223 ], [ "▁emphasis", -11.472556114196777 ], [ "▁Mur", -11.472631454467773 ], [ "▁Bag", -11.472647666931152 ], [ "cons", -11.47273063659668 ], [ "tons", -11.472835540771484 ], [ "lash", 
-11.472987174987793 ], [ "▁Grant", -11.473104476928711 ], [ "▁pris", -11.473175048828125 ], [ "▁bună", -11.47323989868164 ], [ "▁buc", -11.473699569702148 ], [ "▁passe", -11.473746299743652 ], [ "▁jewelry", -11.474213600158691 ], [ "iens", -11.474342346191406 ], [ "▁forma", -11.47453784942627 ], [ "▁Med", -11.474651336669922 ], [ "laufen", -11.474778175354004 ], [ "▁hunt", -11.474977493286133 ], [ "stayed", -11.475086212158203 ], [ "party", -11.475152015686035 ], [ "▁fra", -11.47529411315918 ], [ "▁scenes", -11.475305557250977 ], [ "▁absorb", -11.47535228729248 ], [ "▁abilities", -11.475377082824707 ], [ "lug", -11.475507736206055 ], [ "▁Sarah", -11.475693702697754 ], [ "mpf", -11.47570514678955 ], [ "▁fle", -11.4757080078125 ], [ "accès", -11.475872993469238 ], [ "▁solicit", -11.475926399230957 ], [ "pie", -11.476278305053711 ], [ "▁Zum", -11.476296424865723 ], [ "▁universe", -11.476390838623047 ], [ "▁exists", -11.476449012756348 ], [ "oane", -11.476597785949707 ], [ "IVE", -11.47668743133545 ], [ "▁2011.", -11.476906776428223 ], [ "▁specialists", -11.477072715759277 ], [ "▁mess", -11.477309226989746 ], [ "fach", -11.477402687072754 ], [ "▁Recht", -11.477404594421387 ], [ "▁hack", -11.47755241394043 ], [ "▁jacket", -11.477564811706543 ], [ "HC", -11.47769832611084 ], [ "▁substance", -11.477728843688965 ], [ "▁signing", -11.477775573730469 ], [ "▁allerdings", -11.478032112121582 ], [ "▁publish", -11.478139877319336 ], [ "▁Lab", -11.478157043457031 ], [ "▁agenda", -11.478249549865723 ], [ "lane", -11.478299140930176 ], [ "stream", -11.478620529174805 ], [ "schau", -11.47879409790039 ], [ "▁realizat", -11.478971481323242 ], [ "▁supplier", -11.479019165039062 ], [ "▁moderate", -11.47902774810791 ], [ "▁tours", -11.479212760925293 ], [ "▁narrative", -11.479220390319824 ], [ "ația", -11.479279518127441 ], [ "▁maps", -11.479423522949219 ], [ "treten", -11.479447364807129 ], [ "▁mars", -11.479706764221191 ], [ "▁moon", -11.479745864868164 ], [ "rose", -11.479751586914062 ], [ "▁exp", -11.479766845703125 ], [ "zahl", -11.480154037475586 ], [ "psych", -11.480195999145508 ], [ "▁gehört", -11.48024845123291 ], [ "▁bound", -11.4803466796875 ], [ "▁submission", -11.480451583862305 ], [ "▁clubs", -11.480722427368164 ], [ "Am", -11.480755805969238 ], [ "tenir", -11.480782508850098 ], [ "▁boast", -11.480851173400879 ], [ "▁boards", -11.4810791015625 ], [ "▁Geschäfts", -11.481216430664062 ], [ "zing", -11.48126220703125 ], [ "wort", -11.48137092590332 ], [ "lid", -11.481417655944824 ], [ "▁contractor", -11.481528282165527 ], [ "▁donner", -11.481672286987305 ], [ "▁coupon", -11.481974601745605 ], [ "adresse", -11.482004165649414 ], [ "colo", -11.48210334777832 ], [ "▁perception", -11.482124328613281 ], [ "NC", -11.48222541809082 ], [ "▁abge", -11.482245445251465 ], [ "▁cheaper", -11.482268333435059 ], [ "▁grace", -11.482312202453613 ], [ "▁resident", -11.482718467712402 ], [ "kla", -11.4828462600708 ], [ "▁bug", -11.4828462600708 ], [ "▁Available", -11.482893943786621 ], [ "▁BA", -11.483323097229004 ], [ "▁Met", -11.483601570129395 ], [ "▁climb", -11.48365592956543 ], [ "▁expanded", -11.484349250793457 ], [ "ying", -11.484426498413086 ], [ "▁matching", -11.484469413757324 ], [ "▁suffered", -11.484733581542969 ], [ "▁employed", -11.484755516052246 ], [ "pper", -11.484843254089355 ], [ "▁experiencing", -11.484884262084961 ], [ "ddy", -11.484953880310059 ], [ "▁philosophy", -11.484955787658691 ], [ "▁utilisé", -11.485008239746094 ], [ "▁Jane", -11.485079765319824 ], [ "LI", -11.485087394714355 ], [ 
"▁elected", -11.485185623168945 ], [ "▁MI", -11.485264778137207 ], [ "▁ISO", -11.485340118408203 ], [ "winning", -11.48537540435791 ], [ "▁vot", -11.485424041748047 ], [ "▁generic", -11.485519409179688 ], [ "▁Bol", -11.485650062561035 ], [ "▁copies", -11.48568058013916 ], [ "▁mechanical", -11.48568058013916 ], [ "günstig", -11.485682487487793 ], [ "roy", -11.485770225524902 ], [ "Astfel", -11.485808372497559 ], [ "media", -11.485868453979492 ], [ "▁shoulder", -11.4859037399292 ], [ "▁directory", -11.486000061035156 ], [ "▁banking", -11.486016273498535 ], [ "▁mistakes", -11.486040115356445 ], [ "▁Fran", -11.486425399780273 ], [ "▁Jon", -11.486544609069824 ], [ "▁spare", -11.486579895019531 ], [ "metri", -11.486668586730957 ], [ "▁mask", -11.486879348754883 ], [ "▁consistently", -11.48695182800293 ], [ "▁Columbia", -11.487278938293457 ], [ "roid", -11.48774242401123 ], [ "essen", -11.487935066223145 ], [ "▁(“", -11.48798656463623 ], [ "▁série", -11.488212585449219 ], [ "▁Phil", -11.488249778747559 ], [ "▁usor", -11.488249778747559 ], [ "▁stood", -11.488279342651367 ], [ "▁racing", -11.488335609436035 ], [ "▁Comme", -11.488555908203125 ], [ "▁exceed", -11.488565444946289 ], [ "на", -11.488618850708008 ], [ "▁activate", -11.48873233795166 ], [ "▁circle", -11.488836288452148 ], [ "▁bold", -11.488956451416016 ], [ "▁handy", -11.48909854888916 ], [ "merely", -11.489114761352539 ], [ "▁Edward", -11.489147186279297 ], [ "▁contracts", -11.489530563354492 ], [ "ê", -11.489595413208008 ], [ "▁campaigns", -11.489673614501953 ], [ "▁ought", -11.489733695983887 ], [ "▁nursing", -11.489781379699707 ], [ "▁Jr", -11.489917755126953 ], [ "▁rarely", -11.490032196044922 ], [ "▁Mir", -11.490050315856934 ], [ "▁diagnosis", -11.490379333496094 ], [ "▁Theatre", -11.490394592285156 ], [ "▁producer", -11.490407943725586 ], [ "Currently", -11.490492820739746 ], [ "▁fitting", -11.490580558776855 ], [ "▁ajunge", -11.490618705749512 ], [ "minte", -11.490754127502441 ], [ "▁termen", -11.490838050842285 ], [ "▁Linux", -11.491013526916504 ], [ "▁1-", -11.491068840026855 ], [ "▁hätte", -11.491202354431152 ], [ "▁Resort", -11.49129867553711 ], [ "image", -11.491527557373047 ], [ "▁Rod", -11.49189281463623 ], [ "▁Fly", -11.491924285888672 ], [ "try", -11.492317199707031 ], [ "▁expense", -11.49245834350586 ], [ "▁Interior", -11.492799758911133 ], [ "▁fence", -11.492920875549316 ], [ "▁Kontakt", -11.493063926696777 ], [ "▁ALL", -11.493142127990723 ], [ "VA", -11.493229866027832 ], [ "▁Exchange", -11.493316650390625 ], [ "ranked", -11.493558883666992 ], [ "▁Performance", -11.493621826171875 ], [ "prim", -11.493635177612305 ], [ "▁basket", -11.493694305419922 ], [ "▁Vice", -11.493703842163086 ], [ "phan", -11.4937105178833 ], [ "▁broke", -11.494003295898438 ], [ "voir", -11.49431324005127 ], [ "arg", -11.494512557983398 ], [ "ART", -11.494529724121094 ], [ "▁floors", -11.494856834411621 ], [ "pression", -11.495025634765625 ], [ "▁possession", -11.49507999420166 ], [ "▁domaine", -11.49510669708252 ], [ "▁valeur", -11.495132446289062 ], [ "▁suddenly", -11.495282173156738 ], [ "▁mild", -11.495304107666016 ], [ "▁aflat", -11.495431900024414 ], [ "▁Tea", -11.495731353759766 ], [ "tritt", -11.495767593383789 ], [ "▁Mittel", -11.495773315429688 ], [ "▁regulatory", -11.49580192565918 ], [ "▁spectacular", -11.495905876159668 ], [ "fahrt", -11.495949745178223 ], [ "GS", -11.496026039123535 ], [ "MM", -11.4961576461792 ], [ "▁environments", -11.496203422546387 ], [ "▁Raum", -11.496381759643555 ], [ "▁lay", -11.496664047241211 ], [ "▁cré", 
-11.496713638305664 ], [ "▁Selbst", -11.496726989746094 ], [ "▁opposition", -11.496821403503418 ], [ "two", -11.49729061126709 ], [ "▁Clark", -11.497822761535645 ], [ "▁Netz", -11.497845649719238 ], [ "bald", -11.497983932495117 ], [ "▁Innovation", -11.4982271194458 ], [ "▁overcome", -11.49825382232666 ], [ "quot", -11.499013900756836 ], [ "▁Sin", -11.499106407165527 ], [ "▁Sto", -11.499320983886719 ], [ "▁grain", -11.499560356140137 ], [ "▁collections", -11.499724388122559 ], [ "▁applies", -11.49986743927002 ], [ "mach", -11.499934196472168 ], [ "▁wheels", -11.499958992004395 ], [ "▁universities", -11.500049591064453 ], [ "▁Ray", -11.500182151794434 ], [ "lina", -11.500238418579102 ], [ "▁arrangements", -11.500393867492676 ], [ "▁western", -11.500728607177734 ], [ "rous", -11.500768661499023 ], [ "aise", -11.500784873962402 ], [ "▁highlights", -11.50112533569336 ], [ "▁intend", -11.501265525817871 ], [ "aimed", -11.501358032226562 ], [ "▁Scotland", -11.501360893249512 ], [ "▁acestei", -11.501466751098633 ], [ "graf", -11.50150203704834 ], [ "duction", -11.501517295837402 ], [ "path", -11.50156021118164 ], [ "▁evil", -11.501633644104004 ], [ "▁scris", -11.501791000366211 ], [ "▁disposition", -11.501927375793457 ], [ "▁designing", -11.5020751953125 ], [ "zwar", -11.502172470092773 ], [ "▁Retrieve", -11.50217342376709 ], [ "▁aggressive", -11.502374649047852 ], [ "▁Glen", -11.502411842346191 ], [ "▁daher", -11.502473831176758 ], [ "▁Quick", -11.502494812011719 ], [ "▁recover", -11.502632141113281 ], [ "▁prominent", -11.50288200378418 ], [ "▁visits", -11.503198623657227 ], [ "▁Mis", -11.503376960754395 ], [ "▁edited", -11.503456115722656 ], [ "▁distributed", -11.503564834594727 ], [ "▁dés", -11.503580093383789 ], [ "▁alter", -11.5035982131958 ], [ "▁cooked", -11.503697395324707 ], [ "embl", -11.503706932067871 ], [ "Univers", -11.503715515136719 ], [ "▁Minuten", -11.504156112670898 ], [ "▁compris", -11.504179954528809 ], [ "rais", -11.504182815551758 ], [ "essentially", -11.504199028015137 ], [ "▁rel", -11.504340171813965 ], [ "▁appel", -11.504570007324219 ], [ "▁trace", -11.504788398742676 ], [ "relating", -11.504830360412598 ], [ "dès", -11.504937171936035 ], [ "aste", -11.504961013793945 ], [ "▁raison", -11.504963874816895 ], [ "▁frequent", -11.505281448364258 ], [ "▁beds", -11.505316734313965 ], [ "▁Miami", -11.505511283874512 ], [ "▁vibrant", -11.50564193725586 ], [ "▁Kam", -11.505721092224121 ], [ "▁klar", -11.505861282348633 ], [ "▁Tan", -11.50598430633545 ], [ "▁vidéo", -11.506032943725586 ], [ "▁Kur", -11.506115913391113 ], [ "▁themes", -11.506134033203125 ], [ "▁struggling", -11.506440162658691 ], [ "▁Magazine", -11.506444931030273 ], [ "maker", -11.506476402282715 ], [ "veni", -11.506564140319824 ], [ "▁Groß", -11.506732940673828 ], [ "▁streaming", -11.506772994995117 ], [ "▁analyze", -11.506876945495605 ], [ "▁titles", -11.506982803344727 ], [ "pier", -11.507316589355469 ], [ "▁participant", -11.507347106933594 ], [ "aims", -11.507607460021973 ], [ "▁convention", -11.507638931274414 ], [ "▁flood", -11.507780075073242 ], [ "▁nights", -11.507842063903809 ], [ "▁titre", -11.50792407989502 ], [ "▁voul", -11.508010864257812 ], [ "weit", -11.50816822052002 ], [ "where", -11.508213996887207 ], [ "▁Seiten", -11.508286476135254 ], [ "▁relaxing", -11.508628845214844 ], [ "▁piano", -11.50883674621582 ], [ "▁Pick", -11.508842468261719 ], [ "▁Sony", -11.508955001831055 ], [ "▁enhanced", -11.509017944335938 ], [ "▁visa", -11.50915241241455 ], [ "CH", -11.50930118560791 ], [ "▁instantly", 
-11.50930404663086 ], [ "▁Fan", -11.509721755981445 ], [ "▁diabetes", -11.509988784790039 ], [ "▁popul", -11.50999641418457 ], [ "Ang", -11.510232925415039 ], [ "▁Ask", -11.510295867919922 ], [ "cate", -11.510650634765625 ], [ "▁simplu", -11.510666847229004 ], [ "nahme", -11.510685920715332 ], [ "▁dentist", -11.510842323303223 ], [ "ubi", -11.510920524597168 ], [ "article", -11.511030197143555 ], [ "▁graph", -11.511094093322754 ], [ "▁rival", -11.51121711730957 ], [ "jahr", -11.5113525390625 ], [ "▁bloc", -11.511370658874512 ], [ "fern", -11.511427879333496 ], [ "▁dispar", -11.511516571044922 ], [ "▁servers", -11.511582374572754 ], [ "▁patru", -11.511610984802246 ], [ "▁Within", -11.511634826660156 ], [ "▁situated", -11.511896133422852 ], [ "▁HR", -11.511981964111328 ], [ "▁leaf", -11.511981964111328 ], [ "▁curs", -11.512049674987793 ], [ "antes", -11.512325286865234 ], [ "lux", -11.512406349182129 ], [ "▁1993", -11.512463569641113 ], [ "stance", -11.512650489807129 ], [ "▁northern", -11.512683868408203 ], [ "lves", -11.512718200683594 ], [ "▁contractors", -11.512882232666016 ], [ "▁dimensions", -11.512920379638672 ], [ "▁rolling", -11.513068199157715 ], [ "▁automobile", -11.513211250305176 ], [ "▁cru", -11.51342487335205 ], [ "▁displays", -11.513570785522461 ], [ "web", -11.513812065124512 ], [ "had", -11.513850212097168 ], [ "▁Never", -11.513893127441406 ], [ "▁2-", -11.513932228088379 ], [ "vine", -11.51393985748291 ], [ "▁Wahl", -11.513975143432617 ], [ "▁Markt", -11.514166831970215 ], [ "▁Double", -11.514227867126465 ], [ "▁acknowledge", -11.514229774475098 ], [ "stal", -11.514288902282715 ], [ "▁equity", -11.514620780944824 ], [ "▁ministry", -11.514823913574219 ], [ "▁Lor", -11.514875411987305 ], [ "▁sud", -11.514968872070312 ], [ "idée", -11.515044212341309 ], [ "▁measured", -11.515448570251465 ], [ "▁editing", -11.515609741210938 ], [ "▁singur", -11.515620231628418 ], [ "▁coal", -11.515623092651367 ], [ "▁dramatic", -11.516212463378906 ], [ "AG", -11.516251564025879 ], [ "asca", -11.516280174255371 ], [ "▁crash", -11.516321182250977 ], [ "ischer", -11.516597747802734 ], [ "▁Pla", -11.516871452331543 ], [ "▁psycho", -11.517054557800293 ], [ "piece", -11.517118453979492 ], [ "▁finger", -11.517121315002441 ], [ "▁Hollywood", -11.517123222351074 ], [ "▁Cr", -11.517345428466797 ], [ "▁locally", -11.517622947692871 ], [ "▁mouse", -11.517792701721191 ], [ "▁Base", -11.517867088317871 ], [ "uite", -11.518095016479492 ], [ "▁detect", -11.518099784851074 ], [ "cea", -11.518150329589844 ], [ "▁bull", -11.518194198608398 ], [ "▁curve", -11.518208503723145 ], [ "été", -11.518218994140625 ], [ "ddle", -11.51839542388916 ], [ "▁span", -11.518523216247559 ], [ "WS", -11.518878936767578 ], [ "CL", -11.519017219543457 ], [ "▁officially", -11.519042015075684 ], [ "▁corect", -11.519168853759766 ], [ "▁Artikel", -11.5193510055542 ], [ "▁customized", -11.520099639892578 ], [ "▁intellectual", -11.52018928527832 ], [ "▁heures", -11.520334243774414 ], [ "schule", -11.520444869995117 ], [ "▁investing", -11.520585060119629 ], [ "▁parallel", -11.521227836608887 ], [ "▁loi", -11.521263122558594 ], [ "ările", -11.521566390991211 ], [ "р", -11.521679878234863 ], [ "▁bench", -11.521724700927734 ], [ "▁principle", -11.521756172180176 ], [ "▁Galaxy", -11.521829605102539 ], [ "ța", -11.522237777709961 ], [ "▁(4", -11.522418975830078 ], [ "▁bedrooms", -11.522578239440918 ], [ "née", -11.52273941040039 ], [ "▁surely", -11.52275276184082 ], [ "very", -11.522927284240723 ], [ "stelle", -11.523200988769531 ], [ "activ", 
-11.523216247558594 ], [ "cite", -11.523551940917969 ], [ "▁Original", -11.523553848266602 ], [ "▁palm", -11.523665428161621 ], [ "▁losses", -11.523934364318848 ], [ "▁newspaper", -11.524153709411621 ], [ "ciu", -11.52436351776123 ], [ "▁Hold", -11.524392127990723 ], [ "BO", -11.524422645568848 ], [ "▁CON", -11.524598121643066 ], [ "▁modified", -11.524624824523926 ], [ "▁stake", -11.524735450744629 ], [ "▁Ton", -11.524798393249512 ], [ "▁luna", -11.524968147277832 ], [ "▁Mind", -11.525094985961914 ], [ "lap", -11.525150299072266 ], [ "▁opinions", -11.525247573852539 ], [ "▁Jordan", -11.525351524353027 ], [ "div", -11.52537727355957 ], [ "indi", -11.525418281555176 ], [ "▁Story", -11.525476455688477 ], [ "▁affiliate", -11.52585506439209 ], [ "▁matière", -11.525918960571289 ], [ "▁fifth", -11.526399612426758 ], [ "▁sheets", -11.52645492553711 ], [ "▁puțin", -11.526909828186035 ], [ "ush", -11.526947021484375 ], [ "geführt", -11.526993751525879 ], [ "▁Falls", -11.527168273925781 ], [ "legi", -11.527295112609863 ], [ "▁auction", -11.527326583862305 ], [ "▁cooperation", -11.52735424041748 ], [ "▁Fee", -11.527474403381348 ], [ "▁Daily", -11.52774715423584 ], [ "pies", -11.527853965759277 ], [ "▁basketball", -11.527976036071777 ], [ "removing", -11.528056144714355 ], [ "Besides", -11.528294563293457 ], [ "▁Body", -11.528355598449707 ], [ "▁AD", -11.528369903564453 ], [ "RU", -11.528435707092285 ], [ "ţia", -11.52894401550293 ], [ "▁Extra", -11.528986930847168 ], [ "▁Practice", -11.52900218963623 ], [ "▁Jeff", -11.529017448425293 ], [ "▁început", -11.529253005981445 ], [ "ching", -11.529269218444824 ], [ "▁Gift", -11.529281616210938 ], [ "kk", -11.529295921325684 ], [ "\")", -11.529349327087402 ], [ "▁Austin", -11.529651641845703 ], [ "thro", -11.529766082763672 ], [ "▁camping", -11.529810905456543 ], [ "▁theatre", -11.529850959777832 ], [ "école", -11.529916763305664 ], [ "vient", -11.530159950256348 ], [ "▁faces", -11.530226707458496 ], [ "▁constructed", -11.530437469482422 ], [ "▁overnight", -11.530472755432129 ], [ "▁locale", -11.530574798583984 ], [ "▁roots", -11.530611038208008 ], [ "▁bu", -11.530662536621094 ], [ "4,", -11.530683517456055 ], [ "▁Enterprise", -11.530865669250488 ], [ "screen", -11.530935287475586 ], [ "▁Chef", -11.53096866607666 ], [ "▁Along", -11.531298637390137 ], [ "▁MD", -11.531431198120117 ], [ "▁Supreme", -11.531597137451172 ], [ "En", -11.531655311584473 ], [ "▁verwendet", -11.532015800476074 ], [ "▁processed", -11.532425880432129 ], [ "▁vendors", -11.532549858093262 ], [ "▁FA", -11.532651901245117 ], [ "▁44", -11.532716751098633 ], [ "▁beautifully", -11.532933235168457 ], [ "▁eficient", -11.533092498779297 ], [ "▁Wil", -11.533117294311523 ], [ "▁Member", -11.533121109008789 ], [ "▁damages", -11.5332670211792 ], [ "▁mutual", -11.533288955688477 ], [ "SN", -11.533506393432617 ], [ "▁Dave", -11.533665657043457 ], [ "??", -11.533998489379883 ], [ "stat", -11.534090995788574 ], [ "▁tourist", -11.534374237060547 ], [ "fie", -11.534425735473633 ], [ "şte", -11.534754753112793 ], [ "▁donne", -11.534764289855957 ], [ "▁shadow", -11.53493881225586 ], [ "▁dough", -11.534993171691895 ], [ "▁Gro", -11.535002708435059 ], [ "▁Mah", -11.535066604614258 ], [ "RF", -11.535126686096191 ], [ "▁mechanism", -11.535163879394531 ], [ "▁2011,", -11.535179138183594 ], [ "▁Alter", -11.53530502319336 ], [ "▁opposed", -11.53538990020752 ], [ "▁Fri", -11.535501480102539 ], [ "▁remarkable", -11.535572052001953 ], [ "oral", -11.535635948181152 ], [ "▁verschiedene", -11.535653114318848 ], [ 
"▁difficulty", -11.535691261291504 ], [ "▁Application", -11.535840034484863 ], [ "▁Hay", -11.535888671875 ], [ "▁continua", -11.535935401916504 ], [ "EP", -11.53609848022461 ], [ "▁Pr", -11.53617000579834 ], [ "▁Lady", -11.53631591796875 ], [ "▁interval", -11.536457061767578 ], [ "▁Mil", -11.536504745483398 ], [ "▁2010.", -11.537042617797852 ], [ "VE", -11.537074089050293 ], [ "integr", -11.537360191345215 ], [ "▁création", -11.537415504455566 ], [ "weed", -11.537456512451172 ], [ "EG", -11.53760051727295 ], [ "▁6,", -11.537784576416016 ], [ "▁god", -11.537866592407227 ], [ "▁accomplish", -11.537947654724121 ], [ "▁thoroughly", -11.538019180297852 ], [ "2019", -11.538228988647461 ], [ "izer", -11.538246154785156 ], [ "▁Wal", -11.538300514221191 ], [ "ifying", -11.538701057434082 ], [ "▁Wohn", -11.539227485656738 ], [ "▁Holz", -11.539474487304688 ], [ "▁Advanced", -11.539528846740723 ], [ "▁honey", -11.539626121520996 ], [ "proof", -11.539634704589844 ], [ "▁saison", -11.540029525756836 ], [ "ându", -11.540035247802734 ], [ "▁Kevin", -11.540116310119629 ], [ "▁shelter", -11.540199279785156 ], [ "▁discut", -11.540257453918457 ], [ "▁hike", -11.540257453918457 ], [ "ités", -11.540461540222168 ], [ "▁boutique", -11.540672302246094 ], [ "▁Email", -11.54067611694336 ], [ "▁cosmetic", -11.540830612182617 ], [ "dian", -11.540916442871094 ], [ "▁hohe", -11.540940284729004 ], [ "▁absence", -11.541071891784668 ], [ "axi", -11.541136741638184 ], [ "nah", -11.541178703308105 ], [ "▁Frauen", -11.541236877441406 ], [ "▁actively", -11.541278839111328 ], [ "bind", -11.541468620300293 ], [ "▁everybody", -11.541740417480469 ], [ "▁controller", -11.541802406311035 ], [ "▁1.5", -11.5418062210083 ], [ "erau", -11.541842460632324 ], [ "gehen", -11.541988372802734 ], [ "▁scenario", -11.542038917541504 ], [ "▁odd", -11.542083740234375 ], [ "▁Ultra", -11.542089462280273 ], [ "▁finishing", -11.542366981506348 ], [ "▁cuts", -11.542383193969727 ], [ "▁financing", -11.542515754699707 ], [ "▁Chance", -11.542579650878906 ], [ "surrounded", -11.542818069458008 ], [ "▁joc", -11.542903900146484 ], [ "▁shelf", -11.543004035949707 ], [ "tief", -11.54308032989502 ], [ "▁Sir", -11.543146133422852 ], [ "▁Agent", -11.543197631835938 ], [ "▁scratch", -11.543560981750488 ], [ "2,000", -11.54360294342041 ], [ "nutri", -11.54365348815918 ], [ "nier", -11.544063568115234 ], [ "▁Dur", -11.544175148010254 ], [ "▁grid", -11.544268608093262 ], [ "road", -11.544413566589355 ], [ "▁pets", -11.544429779052734 ], [ "stud", -11.54448127746582 ], [ "OM", -11.544569969177246 ], [ "Die", -11.544877052307129 ], [ "▁800", -11.54496955871582 ], [ "▁arrangement", -11.545088768005371 ], [ "▁Sri", -11.545185089111328 ], [ "▁Patrick", -11.545187950134277 ], [ "ava", -11.545212745666504 ], [ "▁pension", -11.54523754119873 ], [ "dung", -11.545353889465332 ], [ "▁Chapter", -11.545475006103516 ], [ "▁Property", -11.545475006103516 ], [ "▁structural", -11.545571327209473 ], [ "▁overview", -11.545731544494629 ], [ "2015", -11.545917510986328 ], [ "▁lawn", -11.545924186706543 ], [ "▁Vin", -11.546219825744629 ], [ "lik", -11.546402931213379 ], [ "dus", -11.546418190002441 ], [ "Several", -11.54654598236084 ], [ "▁Bou", -11.546670913696289 ], [ "▁copper", -11.546703338623047 ], [ "▁duration", -11.546867370605469 ], [ "inate", -11.546982765197754 ], [ "▁podcast", -11.547204971313477 ], [ "▁Self", -11.547208786010742 ], [ "▁Construction", -11.547491073608398 ], [ "achat", -11.54768180847168 ], [ "???", -11.547683715820312 ], [ "▁Electric", -11.547974586486816 ], [ 
"▁Mrs", -11.54799747467041 ], [ "▁CT", -11.548019409179688 ], [ "▁proceed", -11.548324584960938 ], [ "▁Course", -11.548333168029785 ], [ "▁Frei", -11.548699378967285 ], [ "▁heavily", -11.548868179321289 ], [ "rique", -11.548872947692871 ], [ "version", -11.549016952514648 ], [ "▁representatives", -11.549118041992188 ], [ "▁tourism", -11.549182891845703 ], [ "▁shirt", -11.5494966506958 ], [ "▁rough", -11.549507141113281 ], [ "▁weniger", -11.549735069274902 ], [ "▁keyboard", -11.550058364868164 ], [ "▁heritage", -11.550149917602539 ], [ "kat", -11.550535202026367 ], [ "assez", -11.550567626953125 ], [ "▁cabinets", -11.550591468811035 ], [ "▁Komm", -11.550762176513672 ], [ "▁impressed", -11.55078411102295 ], [ "▁Oregon", -11.550788879394531 ], [ "▁Davis", -11.55081558227539 ], [ "specialized", -11.55097770690918 ], [ "▁gross", -11.550999641418457 ], [ "Located", -11.551044464111328 ], [ "ttle", -11.551044464111328 ], [ "▁2010,", -11.551224708557129 ], [ "chan", -11.551253318786621 ], [ "mine", -11.551305770874023 ], [ "▁aduce", -11.551637649536133 ], [ "▁subsequent", -11.551729202270508 ], [ "▁demo", -11.551851272583008 ], [ "aba", -11.552209854125977 ], [ "▁shock", -11.552389144897461 ], [ "▁theater", -11.552854537963867 ], [ "▁engineers", -11.55294418334961 ], [ "▁feu", -11.553037643432617 ], [ "▁Rot", -11.553058624267578 ], [ "▁addressed", -11.553155899047852 ], [ "▁Letter", -11.553431510925293 ], [ "gré", -11.553448677062988 ], [ "▁quantity", -11.553449630737305 ], [ "▁Seit", -11.553640365600586 ], [ "▁bacteria", -11.553681373596191 ], [ "kg", -11.55408000946045 ], [ "▁conservation", -11.554191589355469 ], [ "▁entreprises", -11.55420207977295 ], [ "▁pleasant", -11.554207801818848 ], [ "armed", -11.554228782653809 ], [ "dorf", -11.554286003112793 ], [ "fact", -11.554320335388184 ], [ "▁Much", -11.554388046264648 ], [ "▁laugh", -11.55482006072998 ], [ "▁blade", -11.554835319519043 ], [ "amine", -11.554838180541992 ], [ "▁insert", -11.55493450164795 ], [ "▁toys", -11.555326461791992 ], [ "▁в", -11.555726051330566 ], [ "cell", -11.555747985839844 ], [ "▁strengthen", -11.555864334106445 ], [ "GR", -11.555882453918457 ], [ "▁autor", -11.556114196777344 ], [ "▁LI", -11.556147575378418 ], [ "▁oamenii", -11.556184768676758 ], [ "▁Modell", -11.556222915649414 ], [ "▁sophisticated", -11.556225776672363 ], [ "▁Write", -11.556283950805664 ], [ "eți", -11.556295394897461 ], [ "say", -11.556641578674316 ], [ "▁nutzen", -11.556783676147461 ], [ "▁amenities", -11.556979179382324 ], [ "chel", -11.557068824768066 ], [ "Unlike", -11.55720043182373 ], [ "▁Bilder", -11.557208061218262 ], [ "fertig", -11.55722713470459 ], [ "PER", -11.557244300842285 ], [ "▁apparently", -11.557282447814941 ], [ "▁pointed", -11.557332992553711 ], [ "lop", -11.557435989379883 ], [ "▁commande", -11.557848930358887 ], [ "▁NEW", -11.557923316955566 ], [ "▁primi", -11.55798625946045 ], [ "▁aluminum", -11.558046340942383 ], [ "ificare", -11.558063507080078 ], [ "open", -11.55815315246582 ], [ "▁establishment", -11.558305740356445 ], [ "▁blanc", -11.558349609375 ], [ "▁1960", -11.558454513549805 ], [ "▁parameters", -11.55856990814209 ], [ "schluss", -11.558685302734375 ], [ "▁jet", -11.55879020690918 ], [ "gam", -11.55902099609375 ], [ "▁oral", -11.559290885925293 ], [ "▁tons", -11.559348106384277 ], [ "▁AL", -11.55935001373291 ], [ "▁intention", -11.55947494506836 ], [ "ives", -11.55974292755127 ], [ "▁BMW", -11.559837341308594 ], [ "gun", -11.559967041015625 ], [ "leben", -11.560046195983887 ], [ "▁Fresh", -11.56010913848877 ], [ 
"▁tuturor", -11.560193061828613 ], [ "▁marine", -11.560208320617676 ], [ "mile", -11.560260772705078 ], [ "▁alta", -11.560271263122559 ], [ "nnen", -11.56050968170166 ], [ "▁courts", -11.560530662536621 ], [ "▁Hello", -11.560791015625 ], [ "BL", -11.560895919799805 ], [ "▁reply", -11.560962677001953 ], [ "environnement", -11.560975074768066 ], [ "American", -11.560995101928711 ], [ "▁Tell", -11.561040878295898 ], [ "▁chic", -11.56148624420166 ], [ "bir", -11.561542510986328 ], [ "▁singing", -11.561788558959961 ], [ "▁earnings", -11.561819076538086 ], [ "▁ensemble", -11.562082290649414 ], [ "▁($", -11.562169075012207 ], [ "▁Tout", -11.562192916870117 ], [ "▁Abs", -11.562264442443848 ], [ "▁describes", -11.562322616577148 ], [ "▁navigation", -11.5625 ], [ "▁destul", -11.562532424926758 ], [ "legate", -11.562586784362793 ], [ "tral", -11.562599182128906 ], [ "aţie", -11.562753677368164 ], [ "▁supplied", -11.562775611877441 ], [ "▁paar", -11.562911987304688 ], [ "ionat", -11.563241958618164 ], [ "9.", -11.563263893127441 ], [ "▁41", -11.563348770141602 ], [ "▁Track", -11.563451766967773 ], [ "▁happiness", -11.563636779785156 ], [ "▁Personen", -11.563680648803711 ], [ "▁sac", -11.56373119354248 ], [ "▁shapes", -11.563774108886719 ], [ "eld", -11.56393051147461 ], [ "bett", -11.563963890075684 ], [ "tile", -11.56400203704834 ], [ "▁divided", -11.564035415649414 ], [ "▁13.", -11.56403923034668 ], [ "market", -11.564109802246094 ], [ "crafted", -11.564115524291992 ], [ "▁periods", -11.564120292663574 ], [ "uş", -11.564568519592285 ], [ "▁trainer", -11.56460952758789 ], [ "▁Licht", -11.564871788024902 ], [ "▁advisor", -11.564948081970215 ], [ "▁Herr", -11.564980506896973 ], [ "▁Halloween", -11.565147399902344 ], [ "alter", -11.565154075622559 ], [ "▁radical", -11.565155029296875 ], [ "▁nose", -11.56527042388916 ], [ "▁Sat", -11.565323829650879 ], [ "▁Mom", -11.565372467041016 ], [ "moni", -11.565377235412598 ], [ "▁semn", -11.565397262573242 ], [ "vé", -11.565672874450684 ], [ "identifie", -11.56570053100586 ], [ "▁hatten", -11.565957069396973 ], [ "completing", -11.565959930419922 ], [ "▁gust", -11.565963745117188 ], [ "▁creat", -11.56601333618164 ], [ "ché", -11.566075325012207 ], [ "pay", -11.566216468811035 ], [ "▁Money", -11.566229820251465 ], [ "IG", -11.566243171691895 ], [ "▁Cash", -11.566327095031738 ], [ "altă", -11.566420555114746 ], [ "▁bekommen", -11.566620826721191 ], [ "▁43", -11.56662654876709 ], [ "▁supplement", -11.566637992858887 ], [ "▁Early", -11.566754341125488 ], [ "▁mattress", -11.56692123413086 ], [ "▁worn", -11.567182540893555 ], [ "rov", -11.567197799682617 ], [ "▁pray", -11.56733226776123 ], [ "▁beans", -11.567673683166504 ], [ "▁passé", -11.567782402038574 ], [ "▁facilit", -11.56782054901123 ], [ "▁meters", -11.56784439086914 ], [ "cke", -11.568163871765137 ], [ "▁Villa", -11.568199157714844 ], [ "▁Diego", -11.568217277526855 ], [ "▁chips", -11.568244934082031 ], [ "▁mes", -11.568349838256836 ], [ "▁Seattle", -11.568421363830566 ], [ "BU", -11.568621635437012 ], [ "▁nevoi", -11.568714141845703 ], [ "▁lets", -11.568737030029297 ], [ "▁hopefully", -11.56894302368164 ], [ "▁AG", -11.568954467773438 ], [ "liable", -11.568999290466309 ], [ "pound", -11.569067001342773 ], [ "près", -11.569085121154785 ], [ "arul", -11.56920337677002 ], [ "isiert", -11.569281578063965 ], [ "▁Expert", -11.569297790527344 ], [ "▁particulier", -11.569367408752441 ], [ "stoff", -11.569952964782715 ], [ "▁interpretation", -11.56999397277832 ], [ "După", -11.57007884979248 ], [ "sait", 
-11.57011604309082 ], [ "▁nouvelles", -11.570173263549805 ], [ "▁Ok", -11.570175170898438 ], [ "tap", -11.570301055908203 ], [ "▁targets", -11.570327758789062 ], [ "rung", -11.57052230834961 ], [ "▁stare", -11.570576667785645 ], [ "▁efficiently", -11.570908546447754 ], [ "EV", -11.571003913879395 ], [ "évit", -11.571310997009277 ], [ "▁Moldova", -11.571542739868164 ], [ "▁Face", -11.571663856506348 ], [ "▁flo", -11.57168960571289 ], [ "▁acestora", -11.5717134475708 ], [ "▁Victor", -11.57183837890625 ], [ "▁breed", -11.57198429107666 ], [ "morph", -11.572230339050293 ], [ "sley", -11.572274208068848 ], [ "mot", -11.57234001159668 ], [ "▁URL", -11.572395324707031 ], [ "ellen", -11.572502136230469 ], [ "▁resist", -11.572781562805176 ], [ "zon", -11.57282829284668 ], [ "ndel", -11.572967529296875 ], [ "will", -11.572989463806152 ], [ "▁alege", -11.573076248168945 ], [ "▁Easter", -11.573114395141602 ], [ "▁Bat", -11.573190689086914 ], [ "▁Höhe", -11.573223114013672 ], [ "▁fascinating", -11.573387145996094 ], [ "▁Know", -11.5735445022583 ], [ "illon", -11.573602676391602 ], [ "flex", -11.57363224029541 ], [ "who", -11.573701858520508 ], [ "▁Always", -11.573729515075684 ], [ "▁Bush", -11.573777198791504 ], [ "ICE", -11.574009895324707 ], [ "verein", -11.57448673248291 ], [ "▁später", -11.57448959350586 ], [ "▁cherch", -11.574575424194336 ], [ "makers", -11.574753761291504 ], [ "versus", -11.574790954589844 ], [ "▁Clear", -11.574846267700195 ], [ "▁Pennsylvania", -11.574912071228027 ], [ "Dieser", -11.575041770935059 ], [ "▁picking", -11.575072288513184 ], [ "▁restoration", -11.57513427734375 ], [ "▁interviews", -11.575201988220215 ], [ "pressed", -11.575210571289062 ], [ "nnerhalb", -11.575674057006836 ], [ "▁connecting", -11.575834274291992 ], [ "jou", -11.575943946838379 ], [ "▁react", -11.576189041137695 ], [ "▁Merci", -11.576223373413086 ], [ "▁Phone", -11.576356887817383 ], [ "▁1)", -11.57652473449707 ], [ "▁victims", -11.576618194580078 ], [ "▁Spo", -11.576685905456543 ], [ "atului", -11.576735496520996 ], [ "▁Harry", -11.576837539672852 ], [ "▁Sala", -11.576875686645508 ], [ "Pol", -11.577075958251953 ], [ "▁Clo", -11.577167510986328 ], [ "▁Erfolg", -11.577211380004883 ], [ "autour", -11.577308654785156 ], [ "▁Template", -11.577314376831055 ], [ "▁invention", -11.57754898071289 ], [ "▁schwer", -11.57761287689209 ], [ "vac", -11.577625274658203 ], [ "▁Trail", -11.577627182006836 ], [ "▁Vietnam", -11.577638626098633 ], [ "▁Size", -11.577689170837402 ], [ "▁Bern", -11.577783584594727 ], [ "▁emp", -11.577845573425293 ], [ "▁shake", -11.57787799835205 ], [ "▁Ave", -11.57794189453125 ], [ "▁productive", -11.578009605407715 ], [ "▁apple", -11.578015327453613 ], [ "▁portal", -11.578052520751953 ], [ "▁ceramic", -11.578082084655762 ], [ "▁pad", -11.578110694885254 ], [ "▁Syn", -11.578316688537598 ], [ "Ab", -11.57845401763916 ], [ "▁syn", -11.578761100769043 ], [ "find", -11.578888893127441 ], [ "▁settle", -11.578909873962402 ], [ "▁général", -11.578965187072754 ], [ "▁okay", -11.579032897949219 ], [ "▁receipt", -11.57906436920166 ], [ "orii", -11.579117774963379 ], [ "▁Mission", -11.579122543334961 ], [ "entrée", -11.579304695129395 ], [ "▁besteht", -11.579394340515137 ], [ "▁wisdom", -11.57950210571289 ], [ "▁heraus", -11.579645156860352 ], [ "▁balanced", -11.579753875732422 ], [ "▁habits", -11.579773902893066 ], [ "tang", -11.579888343811035 ], [ "ură", -11.580151557922363 ], [ "▁winners", -11.580182075500488 ], [ "ç", -11.580215454101562 ], [ "▁folosi", -11.580242156982422 ], [ "aliment", 
-11.5802583694458 ], [ "▁fiction", -11.580373764038086 ], [ "▁Spe", -11.580534934997559 ], [ "▁elsewhere", -11.580663681030273 ], [ "▁dependent", -11.580808639526367 ], [ "▁Anne", -11.581167221069336 ], [ "▁excellence", -11.581695556640625 ], [ "▁Feel", -11.581753730773926 ], [ "lieb", -11.581811904907227 ], [ "▁sectors", -11.581865310668945 ], [ "▁expir", -11.581886291503906 ], [ "▁surfaces", -11.58191204071045 ], [ "▁minim", -11.581937789916992 ], [ "▁tumor", -11.58204460144043 ], [ "▁paragraph", -11.582289695739746 ], [ "▁disk", -11.58232307434082 ], [ "▁tonight", -11.582379341125488 ], [ "▁precious", -11.582794189453125 ], [ "▁console", -11.58288288116455 ], [ "Th", -11.582939147949219 ], [ "neu", -11.583020210266113 ], [ "effective", -11.5839262008667 ], [ "▁Republican", -11.583944320678711 ], [ "format", -11.584297180175781 ], [ "▁preserve", -11.58436107635498 ], [ "▁wiring", -11.584599494934082 ], [ "▁exercises", -11.584757804870605 ], [ "▁pregnancy", -11.584774017333984 ], [ "tries", -11.58481502532959 ], [ "▁jeunes", -11.584883689880371 ], [ "▁publishing", -11.584932327270508 ], [ "▁nehmen", -11.584935188293457 ], [ "▁capability", -11.5849609375 ], [ "▁prompt", -11.584965705871582 ], [ "▁Further", -11.58497428894043 ], [ "▁semaine", -11.585173606872559 ], [ "abo", -11.585216522216797 ], [ "▁evolution", -11.585319519042969 ], [ "▁Sud", -11.585403442382812 ], [ "▁frais", -11.585525512695312 ], [ "LT", -11.585619926452637 ], [ "▁stack", -11.58581829071045 ], [ "▁Inside", -11.585854530334473 ], [ "▁programmes", -11.585997581481934 ], [ "▁passes", -11.586196899414062 ], [ "mü", -11.586474418640137 ], [ "▁progressive", -11.586518287658691 ], [ "▁calculator", -11.58658218383789 ], [ "▁Core", -11.586655616760254 ], [ "BT", -11.586956977844238 ], [ "core", -11.586996078491211 ], [ "▁Moon", -11.587004661560059 ], [ "▁tender", -11.587040901184082 ], [ "durch", -11.58721923828125 ], [ "▁commune", -11.587453842163086 ], [ "▁Prince", -11.587594032287598 ], [ "▁demonstrated", -11.587693214416504 ], [ "▁conversations", -11.587890625 ], [ "▁fri", -11.587984085083008 ], [ "igh", -11.587992668151855 ], [ "being", -11.588334083557129 ], [ "pause", -11.58853530883789 ], [ "▁Bear", -11.58871841430664 ], [ "ayant", -11.588875770568848 ], [ "▁Industry", -11.588967323303223 ], [ "▁sponsor", -11.589012145996094 ], [ "▁numele", -11.589098930358887 ], [ "▁VA", -11.589167594909668 ], [ "▁Sommer", -11.589366912841797 ], [ "TB", -11.589380264282227 ], [ "▁optional", -11.589505195617676 ], [ "▁Landes", -11.589812278747559 ], [ "coli", -11.589963912963867 ], [ "empt", -11.59018325805664 ], [ "▁Iron", -11.590620040893555 ], [ "▁1992", -11.59090518951416 ], [ "▁attempts", -11.59090518951416 ], [ "halb", -11.590960502624512 ], [ "▁photographer", -11.59097671508789 ], [ "▁witness", -11.59097957611084 ], [ "bru", -11.591073989868164 ], [ "▁Ras", -11.59107780456543 ], [ "▁burden", -11.591142654418945 ], [ "▁kaufen", -11.591256141662598 ], [ "▁vu", -11.591362953186035 ], [ "▁Wedding", -11.591601371765137 ], [ "▁Kla", -11.591604232788086 ], [ "occasion", -11.591915130615234 ], [ "▁keys", -11.592131614685059 ], [ "▁oferi", -11.592279434204102 ], [ "▁puzzle", -11.592302322387695 ], [ "eaux", -11.59254264831543 ], [ "▁Eco", -11.592805862426758 ], [ "▁52", -11.592817306518555 ], [ "▁Elizabeth", -11.59284496307373 ], [ "▁dispose", -11.593144416809082 ], [ "▁cluster", -11.59326171875 ], [ "iki", -11.593283653259277 ], [ "▁Guys", -11.593595504760742 ], [ "▁Economic", -11.593632698059082 ], [ "▁apar", -11.593677520751953 ], [ 
"▁ziua", -11.593688011169434 ], [ "▁integral", -11.593740463256836 ], [ "▁tac", -11.59376335144043 ], [ "▁restrictions", -11.593778610229492 ], [ "▁nerve", -11.593794822692871 ], [ "▁Stop", -11.59386157989502 ], [ "burger", -11.593897819519043 ], [ "explo", -11.593944549560547 ], [ "lö", -11.593958854675293 ], [ "NP", -11.594077110290527 ], [ "▁Brook", -11.59418773651123 ], [ "▁Close", -11.594278335571289 ], [ "▁representing", -11.59446907043457 ], [ "▁certaine", -11.594767570495605 ], [ "▁discovery", -11.594836235046387 ], [ "▁rece", -11.594964981079102 ], [ "FF", -11.594970703125 ], [ "▁salary", -11.595069885253906 ], [ "▁Wolf", -11.595137596130371 ], [ "▁deserve", -11.595166206359863 ], [ "ţele", -11.595417976379395 ], [ "gathered", -11.595934867858887 ], [ "▁comply", -11.59599494934082 ], [ "lagen", -11.596034049987793 ], [ "ătoare", -11.596192359924316 ], [ "▁relate", -11.596410751342773 ], [ "▁Roger", -11.59656810760498 ], [ "▁blame", -11.596575736999512 ], [ "▁Jen", -11.596914291381836 ], [ "▁army", -11.596936225891113 ], [ "▁$10", -11.597129821777344 ], [ "▁Cabinet", -11.597185134887695 ], [ "Gu", -11.597367286682129 ], [ "▁wildlife", -11.597452163696289 ], [ "▁Memorial", -11.597643852233887 ], [ "▁Holiday", -11.597742080688477 ], [ "▁curat", -11.598291397094727 ], [ "iilor", -11.598299026489258 ], [ "▁fleet", -11.598408699035645 ], [ "▁reviewed", -11.59843635559082 ], [ "cet", -11.598450660705566 ], [ "▁virtually", -11.598487854003906 ], [ "▁Crusher", -11.59852409362793 ], [ "▁slide", -11.59858226776123 ], [ "▁générale", -11.598604202270508 ], [ "▁sensation", -11.598630905151367 ], [ "▁garlic", -11.598638534545898 ], [ "5)", -11.598657608032227 ], [ "▁batteries", -11.598756790161133 ], [ "SH", -11.59876823425293 ], [ "▁seller", -11.59882926940918 ], [ "design", -11.598871231079102 ], [ "5.", -11.598944664001465 ], [ "▁Overall", -11.598969459533691 ], [ "▁investigate", -11.599058151245117 ], [ "max", -11.599064826965332 ], [ "▁attach", -11.599166870117188 ], [ "▁Future", -11.599209785461426 ], [ "OUR", -11.599284172058105 ], [ "▁LE", -11.59968090057373 ], [ "▁bite", -11.599811553955078 ], [ "tige", -11.599874496459961 ], [ "▁twist", -11.59987735748291 ], [ "hole", -11.600180625915527 ], [ "▁Tony", -11.600510597229004 ], [ "LU", -11.600598335266113 ], [ "▁Organization", -11.600617408752441 ], [ "▁invit", -11.600632667541504 ], [ "▁Ant", -11.600739479064941 ], [ "NR", -11.600788116455078 ], [ "sorgt", -11.600854873657227 ], [ "▁Lan", -11.600860595703125 ], [ "▁Manchester", -11.60091495513916 ], [ "schrift", -11.601066589355469 ], [ "▁kg", -11.601150512695312 ], [ "▁aroma", -11.60132884979248 ], [ "▁Source", -11.601388931274414 ], [ "▁permite", -11.601445198059082 ], [ "▁Consider", -11.601457595825195 ], [ "▁Artist", -11.601627349853516 ], [ "▁transmit", -11.601783752441406 ], [ "oasa", -11.601834297180176 ], [ "▁Zen", -11.60198974609375 ], [ "ANT", -11.602235794067383 ], [ "▁consulting", -11.602404594421387 ], [ "▁commence", -11.6025390625 ], [ "▁quilt", -11.60261058807373 ], [ "owned", -11.602642059326172 ], [ "▁bro", -11.602689743041992 ], [ "▁integrate", -11.602715492248535 ], [ "▁Ontario", -11.602775573730469 ], [ "TF", -11.602832794189453 ], [ "▁Study", -11.602887153625488 ], [ "▁ensuite", -11.603155136108398 ], [ "itatii", -11.603180885314941 ], [ "Mon", -11.603235244750977 ], [ "-11", -11.603299140930176 ], [ "what", -11.603384017944336 ], [ "▁Things", -11.60361385345459 ], [ "▁Eye", -11.603819847106934 ], [ "▁présente", -11.603828430175781 ], [ "tention", -11.603915214538574 
], [ "|", -11.603957176208496 ], [ "stall", -11.603963851928711 ], [ "▁beef", -11.603992462158203 ], [ "figur", -11.604005813598633 ], [ "▁cancel", -11.604146003723145 ], [ "▁domeniul", -11.604252815246582 ], [ "▁360", -11.604290008544922 ], [ "▁sleeping", -11.6045560836792 ], [ "▁traitement", -11.604580879211426 ], [ "ühl", -11.604769706726074 ], [ "▁Environmental", -11.604835510253906 ], [ "cier", -11.604894638061523 ], [ "▁NC", -11.604907035827637 ], [ "pub", -11.604925155639648 ], [ "▁addiction", -11.605071067810059 ], [ "▁nest", -11.605128288269043 ], [ "▁ON", -11.605395317077637 ], [ "▁discrimin", -11.605396270751953 ], [ "▁proved", -11.605517387390137 ], [ "▁occasions", -11.605864524841309 ], [ "OH", -11.606184959411621 ], [ "▁lawyers", -11.606203079223633 ], [ "own", -11.606290817260742 ], [ "▁Meeting", -11.606596946716309 ], [ "▁Industrial", -11.606704711914062 ], [ "owed", -11.606736183166504 ], [ "▁Cel", -11.606793403625488 ], [ "legt", -11.60706615447998 ], [ "ily", -11.607085227966309 ], [ "▁wins", -11.607155799865723 ], [ "▁strap", -11.607367515563965 ], [ "digit", -11.607441902160645 ], [ "▁hinaus", -11.607504844665527 ], [ "mple", -11.607712745666504 ], [ "▁(5", -11.607797622680664 ], [ "▁pdf", -11.607894897460938 ], [ "▁eco", -11.607915878295898 ], [ "▁junior", -11.608172416687012 ], [ "DB", -11.608556747436523 ], [ "gelegt", -11.608636856079102 ], [ "ION", -11.608678817749023 ], [ "▁competitors", -11.60880184173584 ], [ "▁Arab", -11.60898208618164 ], [ "▁Secret", -11.609148979187012 ], [ "▁Kunst", -11.609283447265625 ], [ "▁worried", -11.609297752380371 ], [ "meiner", -11.609378814697266 ], [ "▁Magic", -11.609450340270996 ], [ "▁groß", -11.609537124633789 ], [ "▁travaux", -11.609748840332031 ], [ "▁sollen", -11.609772682189941 ], [ "▁Sciences", -11.609850883483887 ], [ "▁athletes", -11.610055923461914 ], [ "▁discounts", -11.610079765319824 ], [ "kit", -11.610211372375488 ], [ "lind", -11.610305786132812 ], [ "▁enjoyable", -11.610421180725098 ], [ "ground", -11.610489845275879 ], [ "▁Tat", -11.610529899597168 ], [ "▁passengers", -11.610576629638672 ], [ "▁Dami", -11.610677719116211 ], [ "▁Major", -11.61070728302002 ], [ "watch", -11.610796928405762 ], [ "working", -11.610908508300781 ], [ "arrêt", -11.610923767089844 ], [ "▁subtle", -11.611069679260254 ], [ "▁epi", -11.611197471618652 ], [ "▁Jahres", -11.61128044128418 ], [ "▁cooling", -11.61141586303711 ], [ "▁makeup", -11.611427307128906 ], [ "jet", -11.611495018005371 ], [ "▁Given", -11.611519813537598 ], [ "plex", -11.61158275604248 ], [ "▁exploit", -11.611590385437012 ], [ "rine", -11.611604690551758 ], [ "▁delivers", -11.612122535705566 ], [ "▁summary", -11.612236022949219 ], [ "▁beaches", -11.612459182739258 ], [ "lift", -11.612550735473633 ], [ "▁Suite", -11.612554550170898 ], [ "▁Assistant", -11.612688064575195 ], [ "▁taxi", -11.61273193359375 ], [ "▁peaceful", -11.612805366516113 ], [ "▁Mode", -11.612980842590332 ], [ "▁Fun", -11.613059043884277 ], [ "▁diameter", -11.613142967224121 ], [ "▁phrase", -11.613150596618652 ], [ "ACT", -11.613265037536621 ], [ "▁différentes", -11.613322257995605 ], [ "▁14.", -11.613417625427246 ], [ "▁CE", -11.61352825164795 ], [ "▁2)", -11.613739013671875 ], [ "▁Nat", -11.613785743713379 ], [ "▁delete", -11.61388111114502 ], [ "other", -11.613930702209473 ], [ "hang", -11.613985061645508 ], [ "▁sujet", -11.614117622375488 ], [ "▁precise", -11.614212989807129 ], [ "▁Total", -11.614290237426758 ], [ "▁chambre", -11.614483833312988 ], [ "sati", -11.614666938781738 ], [ "▁Metal", 
-11.614995956420898 ], [ "rust", -11.615038871765137 ], [ "▁Brazil", -11.615508079528809 ], [ "▁hybrid", -11.615636825561523 ], [ "ops", -11.615691184997559 ], [ "▁electro", -11.615789413452148 ], [ "utz", -11.61608600616455 ], [ "▁quoi", -11.616246223449707 ], [ "▁adoption", -11.616331100463867 ], [ "3.5", -11.616518020629883 ], [ "50,000", -11.616599082946777 ], [ "veti", -11.616630554199219 ], [ "hir", -11.616957664489746 ], [ "▁adequate", -11.617067337036133 ], [ "ologist", -11.617109298706055 ], [ "torii", -11.617295265197754 ], [ "wasser", -11.617355346679688 ], [ "▁Authority", -11.617362976074219 ], [ "▁donation", -11.617364883422852 ], [ "700", -11.617375373840332 ], [ "▁somehow", -11.617375373840332 ], [ "▁kostenlos", -11.617425918579102 ], [ "▁generations", -11.617537498474121 ], [ "▁Turkey", -11.617711067199707 ], [ "rata", -11.617819786071777 ], [ "▁animation", -11.618206024169922 ], [ "▁CH", -11.618281364440918 ], [ "ending", -11.618317604064941 ], [ "welt", -11.618376731872559 ], [ "bac", -11.618380546569824 ], [ "MG", -11.618460655212402 ], [ "▁parks", -11.618468284606934 ], [ "▁placing", -11.618870735168457 ], [ "sort", -11.61915111541748 ], [ "▁Bitcoin", -11.619163513183594 ], [ "▁disorder", -11.619282722473145 ], [ "MAN", -11.619302749633789 ], [ "aught", -11.619412422180176 ], [ "▁guides", -11.61956787109375 ], [ "▁circul", -11.619651794433594 ], [ "▁Steven", -11.619954109191895 ], [ "rrière", -11.619976997375488 ], [ "▁Arch", -11.61999225616455 ], [ "▁plates", -11.620091438293457 ], [ "MR", -11.620118141174316 ], [ "▁cow", -11.620142936706543 ], [ "▁integrity", -11.620210647583008 ], [ "▁(18", -11.620217323303223 ], [ "▁totul", -11.62024211883545 ], [ "jack", -11.620373725891113 ], [ "▁privire", -11.620588302612305 ], [ "▁terme", -11.620752334594727 ], [ "▁execution", -11.620781898498535 ], [ "▁organism", -11.620838165283203 ], [ "▁führen", -11.620853424072266 ], [ "▁patron", -11.620940208435059 ], [ "▁appreciated", -11.62096881866455 ], [ "liant", -11.62100601196289 ], [ "▁Solar", -11.621055603027344 ], [ "▁vinyl", -11.621134757995605 ], [ "▁treasure", -11.621137619018555 ], [ "▁retro", -11.621167182922363 ], [ "▁bout", -11.621174812316895 ], [ "lab", -11.621183395385742 ], [ "▁dimension", -11.621394157409668 ], [ "called", -11.62146282196045 ], [ "▁intern", -11.621479034423828 ], [ "issement", -11.62173843383789 ], [ "▁Erst", -11.621837615966797 ], [ "▁stellen", -11.621920585632324 ], [ "▁familia", -11.622069358825684 ], [ "▁notion", -11.622176170349121 ], [ "▁Could", -11.622322082519531 ], [ "Getting", -11.622323036193848 ], [ "▁drives", -11.622397422790527 ], [ "▁Israeli", -11.622520446777344 ], [ "▁nations", -11.622546195983887 ], [ "▁duties", -11.622700691223145 ], [ "▁personalized", -11.622788429260254 ], [ "▁weren", -11.62282657623291 ], [ "▁chemicals", -11.622847557067871 ], [ "▁killing", -11.622913360595703 ], [ "▁masa", -11.622994422912598 ], [ "▁parce", -11.623026847839355 ], [ "▁lady", -11.623178482055664 ], [ "ides", -11.623221397399902 ], [ "▁execut", -11.62340259552002 ], [ "▁floral", -11.62341594696045 ], [ "▁Child", -11.623428344726562 ], [ "▁medal", -11.623503684997559 ], [ "▁casa", -11.623603820800781 ], [ "▁enabled", -11.623650550842285 ], [ "12.", -11.624239921569824 ], [ "nger", -11.624266624450684 ], [ "▁vent", -11.624297142028809 ], [ "▁urmă", -11.624727249145508 ], [ "▁Herz", -11.624835968017578 ], [ "▁Jay", -11.624916076660156 ], [ ".....", -11.624942779541016 ], [ "▁Kris", -11.62499713897705 ], [ "kenn", -11.625001907348633 ], [ "ress", 
-11.625027656555176 ], [ "weight", -11.62519359588623 ], [ "▁indicates", -11.625198364257812 ], [ "▁mentor", -11.625328063964844 ], [ "using", -11.625386238098145 ], [ "▁femmes", -11.625460624694824 ], [ "▁Jung", -11.625528335571289 ], [ "▁Send", -11.625574111938477 ], [ "▁seasons", -11.625906944274902 ], [ "▁aesthetic", -11.625964164733887 ], [ "▁Block", -11.626086235046387 ], [ "▁babies", -11.626150131225586 ], [ "zig", -11.626242637634277 ], [ "edge", -11.626428604125977 ], [ "▁alike", -11.626458168029785 ], [ "▁immune", -11.626609802246094 ], [ "▁magical", -11.626710891723633 ], [ "▁Snow", -11.626748085021973 ], [ "▁spacious", -11.627058982849121 ], [ "▁Melbourne", -11.62706184387207 ], [ "order", -11.627081871032715 ], [ "▁timing", -11.627176284790039 ], [ "▁inainte", -11.627220153808594 ], [ "▁width", -11.627327919006348 ], [ "bild", -11.627386093139648 ], [ "Tra", -11.627429008483887 ], [ "▁appliances", -11.627449989318848 ], [ "▁dirt", -11.627498626708984 ], [ "▁Rent", -11.627689361572266 ], [ "responsibilities", -11.627747535705566 ], [ "▁blogs", -11.62778377532959 ], [ "nächsten", -11.627799034118652 ], [ "▁argue", -11.627928733825684 ], [ "▁Resume", -11.627985954284668 ], [ "▁Michel", -11.628044128417969 ], [ "▁terrible", -11.628092765808105 ], [ "graph", -11.628151893615723 ], [ "bird", -11.628202438354492 ], [ "▁Simple", -11.628457069396973 ], [ "nning", -11.628658294677734 ], [ "▁coconut", -11.628683090209961 ], [ "▁comprise", -11.628787994384766 ], [ "heure", -11.628918647766113 ], [ "▁nichts", -11.628921508789062 ], [ "▁manufacture", -11.628966331481934 ], [ "▁Sar", -11.629011154174805 ], [ "green", -11.629014015197754 ], [ "lining", -11.62910270690918 ], [ "▁tremendous", -11.629128456115723 ], [ "▁Wine", -11.629164695739746 ], [ "gir", -11.629290580749512 ], [ "▁Nothing", -11.629562377929688 ], [ "▁Miller", -11.62957763671875 ], [ "▁Schwe", -11.629712104797363 ], [ "zone", -11.629942893981934 ], [ "▁cunoscut", -11.629964828491211 ], [ "rupt", -11.630166053771973 ], [ "kle", -11.630187034606934 ], [ "▁Bucuresti", -11.630510330200195 ], [ "▁Abend", -11.630574226379395 ], [ "▁aura", -11.630583763122559 ], [ "▁Dance", -11.63073444366455 ], [ "▁Wilson", -11.63086986541748 ], [ "icide", -11.630901336669922 ], [ "bai", -11.630910873413086 ], [ "oriented", -11.63103199005127 ], [ "▁celebrated", -11.631421089172363 ], [ "schlag", -11.631531715393066 ], [ "▁10-", -11.631600379943848 ], [ "Unsere", -11.63167667388916 ], [ "énergie", -11.632009506225586 ], [ "▁qualify", -11.63205623626709 ], [ "▁contenu", -11.632177352905273 ], [ "▁Lauf", -11.63220500946045 ], [ "▁einzelne", -11.632360458374023 ], [ "▁Youth", -11.632415771484375 ], [ "explains", -11.632601737976074 ], [ "grat", -11.632782936096191 ], [ "▁72", -11.632804870605469 ], [ "labor", -11.632885932922363 ], [ "2018", -11.632940292358398 ], [ "▁Dank", -11.633149147033691 ], [ "▁Hey", -11.633523941040039 ], [ "▁refuse", -11.633536338806152 ], [ "▁graduated", -11.633599281311035 ], [ "▁României", -11.633627891540527 ], [ "punkt", -11.633807182312012 ], [ "▁regulation", -11.633834838867188 ], [ "Bru", -11.633842468261719 ], [ "▁Side", -11.633891105651855 ], [ "▁sol", -11.633970260620117 ], [ "▁extraordinary", -11.634182929992676 ], [ "▁ging", -11.634247779846191 ], [ "▁Creative", -11.634299278259277 ], [ "▁expanding", -11.634349822998047 ], [ "▁problème", -11.63444995880127 ], [ "▁Reserve", -11.63459300994873 ], [ "auteur", -11.634642601013184 ], [ "sphere", -11.634657859802246 ], [ "season", -11.634716987609863 ], [ "frei", 
-11.634756088256836 ], [ "▁8,", -11.634765625 ], [ "▁filing", -11.634810447692871 ], [ "▁Complete", -11.635017395019531 ], [ "▁revolution", -11.635035514831543 ], [ "▁unele", -11.63520622253418 ], [ "/8", -11.635272979736328 ], [ "istes", -11.635310173034668 ], [ "backed", -11.635400772094727 ], [ "shirt", -11.635554313659668 ], [ "▁Details", -11.635673522949219 ], [ "rod", -11.635695457458496 ], [ "▁pod", -11.63582992553711 ], [ "▁operators", -11.635921478271484 ], [ "was", -11.635930061340332 ], [ "hou", -11.63594913482666 ], [ "▁Coach", -11.636075019836426 ], [ "irii", -11.636138916015625 ], [ "▁ordinary", -11.636186599731445 ], [ "Institut", -11.63620662689209 ], [ "▁Flash", -11.63633918762207 ], [ "0-", -11.636537551879883 ], [ "▁flavour", -11.6367769241333 ], [ "specific", -11.636906623840332 ], [ "▁landing", -11.636930465698242 ], [ "▁geo", -11.636935234069824 ], [ "▁legend", -11.636983871459961 ], [ "vari", -11.63703441619873 ], [ "rop", -11.637084007263184 ], [ "▁Excel", -11.6370849609375 ], [ "▁Flu", -11.637203216552734 ], [ "▁intent", -11.637582778930664 ], [ "▁Deep", -11.637594223022461 ], [ "▁Kor", -11.63763427734375 ], [ "▁Philadelphia", -11.637914657592773 ], [ "▁rând", -11.63800048828125 ], [ "▁USD", -11.638033866882324 ], [ "laden", -11.63803482055664 ], [ "▁Hin", -11.638047218322754 ], [ "hap", -11.638197898864746 ], [ "▁thorough", -11.638227462768555 ], [ "▁oferit", -11.63826847076416 ], [ "kind", -11.63831615447998 ], [ "▁Cancer", -11.638428688049316 ], [ "apo", -11.638596534729004 ], [ "▁valve", -11.638650894165039 ], [ "▁encouraging", -11.63884449005127 ], [ "▁sûr", -11.638904571533203 ], [ "shing", -11.638981819152832 ], [ "▁49", -11.639132499694824 ], [ "gov", -11.639142990112305 ], [ "▁Five", -11.63933277130127 ], [ "▁stroke", -11.639344215393066 ], [ "▁apă", -11.639398574829102 ], [ "▁gambling", -11.639543533325195 ], [ "▁nord", -11.63963508605957 ], [ "onal", -11.639691352844238 ], [ "▁captured", -11.63979721069336 ], [ "▁lucruri", -11.640068054199219 ], [ "serait", -11.640192985534668 ], [ "▁Members", -11.640265464782715 ], [ "ital", -11.640275955200195 ], [ "▁mounted", -11.640475273132324 ], [ "▁opens", -11.640792846679688 ], [ "▁Marie", -11.640861511230469 ], [ "Tech", -11.640902519226074 ], [ "▁wishes", -11.641016006469727 ], [ "▁regards", -11.641073226928711 ], [ "going", -11.641156196594238 ], [ "Opti", -11.641250610351562 ], [ "▁femei", -11.641331672668457 ], [ "▁Fish", -11.64142894744873 ], [ "▁mount", -11.641800880432129 ], [ "▁Hunt", -11.641887664794922 ], [ "▁probabil", -11.64205265045166 ], [ "▁assured", -11.642191886901855 ], [ "pho", -11.642230033874512 ], [ "▁manufactured", -11.642313003540039 ], [ "▁realistic", -11.642437934875488 ], [ "ații", -11.642580032348633 ], [ "▁Planning", -11.642598152160645 ], [ "▁român", -11.642645835876465 ], [ "ggy", -11.642669677734375 ], [ "▁produces", -11.642696380615234 ], [ "▁reminder", -11.64284896850586 ], [ "TION", -11.642868041992188 ], [ "▁brake", -11.642909049987793 ], [ "▁pla", -11.643172264099121 ], [ "▁Premium", -11.643270492553711 ], [ "▁carb", -11.643310546875 ], [ "▁shine", -11.643390655517578 ], [ "▁carrier", -11.643492698669434 ], [ "▁poverty", -11.64350414276123 ], [ "▁effectiveness", -11.6436128616333 ], [ "administr", -11.643655776977539 ], [ "▁Chamber", -11.643658638000488 ], [ "▁suntem", -11.64376163482666 ], [ "▁noastră", -11.643855094909668 ], [ "▁sofort", -11.643877983093262 ], [ "▁moisture", -11.644058227539062 ], [ "limb", -11.6441011428833 ], [ "entre", -11.644328117370605 ], [ "▁SD", 
-11.644330978393555 ], [ "▁BC", -11.644539833068848 ], [ "▁selecting", -11.6445951461792 ], [ "achieving", -11.644673347473145 ], [ "info", -11.644735336303711 ], [ "▁membres", -11.644983291625977 ], [ "▁shoe", -11.645014762878418 ], [ "▁locate", -11.645065307617188 ], [ "▁assignment", -11.645085334777832 ], [ "lern", -11.645283699035645 ], [ "▁defeat", -11.645406723022461 ], [ "▁endless", -11.645458221435547 ], [ "▁Stunden", -11.645523071289062 ], [ "то", -11.645561218261719 ], [ "▁mur", -11.645586013793945 ], [ "▁wissen", -11.645844459533691 ], [ "aime", -11.645915031433105 ], [ "1-2", -11.646056175231934 ], [ "▁femme", -11.646212577819824 ], [ "robe", -11.646468162536621 ], [ "▁embrace", -11.64647102355957 ], [ "▁baseball", -11.646614074707031 ], [ "▁hunting", -11.64663314819336 ], [ "betrieb", -11.646790504455566 ], [ "▁gardens", -11.647045135498047 ], [ "▁risc", -11.647096633911133 ], [ "▁Cri", -11.647263526916504 ], [ "best", -11.647506713867188 ], [ "▁Audio", -11.647621154785156 ], [ "▁intens", -11.647659301757812 ], [ "▁Round", -11.647744178771973 ], [ "▁fireplace", -11.6478271484375 ], [ "▁dozen", -11.647912979125977 ], [ "▁hospitals", -11.64802360534668 ], [ "▁profits", -11.648076057434082 ], [ "▁Mail", -11.64811897277832 ], [ "obtenir", -11.648191452026367 ], [ "▁Ross", -11.648241996765137 ], [ "bun", -11.648573875427246 ], [ "polar", -11.648688316345215 ], [ "▁reflection", -11.648873329162598 ], [ "▁fut", -11.648992538452148 ], [ "phon", -11.649017333984375 ], [ "deck", -11.649094581604004 ], [ "renowned", -11.649188041687012 ], [ "▁cate", -11.649308204650879 ], [ "▁decorative", -11.6494722366333 ], [ "ieri", -11.64957332611084 ], [ "▁Tap", -11.64958381652832 ], [ "▁Dallas", -11.649600982666016 ], [ "rik", -11.649665832519531 ], [ "▁pied", -11.649727821350098 ], [ "rés", -11.649821281433105 ], [ "ppy", -11.650137901306152 ], [ "▁bitte", -11.650188446044922 ], [ "▁cave", -11.650257110595703 ], [ "▁rescue", -11.650559425354004 ], [ "▁Hilfe", -11.650714874267578 ], [ "▁Jason", -11.650786399841309 ], [ "▁Nations", -11.650838851928711 ], [ "▁profil", -11.650938987731934 ], [ "▁Atlantic", -11.651105880737305 ], [ "▁rub", -11.651126861572266 ], [ "▁collaborative", -11.65113353729248 ], [ "étude", -11.651150703430176 ], [ "▁Workshop", -11.651389122009277 ], [ "nez", -11.651628494262695 ], [ "▁chacun", -11.651714324951172 ], [ "▁Too", -11.65211296081543 ], [ "App", -11.652313232421875 ], [ "▁conseil", -11.652399063110352 ], [ "▁signals", -11.652474403381348 ], [ "▁Dead", -11.652497291564941 ], [ "▁Austria", -11.652522087097168 ], [ "▁slots", -11.652579307556152 ], [ "▁Dies", -11.652623176574707 ], [ "raj", -11.652629852294922 ], [ "stick", -11.652833938598633 ], [ "▁jaw", -11.653030395507812 ], [ "▁lounge", -11.653059005737305 ], [ "curi", -11.653359413146973 ], [ "nem", -11.653456687927246 ], [ "▁Cluj", -11.653512954711914 ], [ "▁rapide", -11.653584480285645 ], [ "▁companion", -11.653716087341309 ], [ "▁WE", -11.653879165649414 ], [ "▁bord", -11.65389347076416 ], [ "ody", -11.654045104980469 ], [ "gru", -11.654057502746582 ], [ "▁46", -11.654410362243652 ], [ "kra", -11.654717445373535 ], [ "eller", -11.65477180480957 ], [ "naire", -11.65511703491211 ], [ "hose", -11.655253410339355 ], [ "▁Atlanta", -11.655254364013672 ], [ "▁violent", -11.65530776977539 ], [ "▁imagination", -11.655352592468262 ], [ "▁reward", -11.655389785766602 ], [ "▁Korean", -11.655441284179688 ], [ "▁branches", -11.655501365661621 ], [ "▁GPS", -11.655625343322754 ], [ "glo", -11.655633926391602 ], [ "▁condo", 
-11.655705451965332 ], [ "▁Investment", -11.655765533447266 ], [ "▁involvement", -11.655813217163086 ], [ "▁trap", -11.655829429626465 ], [ "▁schön", -11.655872344970703 ], [ "▁ofera", -11.655933380126953 ], [ "▁unterschiedlich", -11.65596866607666 ], [ "Net", -11.655987739562988 ], [ "▁predict", -11.656113624572754 ], [ "identifying", -11.656309127807617 ], [ "▁noir", -11.6566162109375 ], [ "kos", -11.656816482543945 ], [ "poz", -11.656816482543945 ], [ "▁11,", -11.65698528289795 ], [ "▁fitted", -11.657384872436523 ], [ "MU", -11.657469749450684 ], [ "TT", -11.657645225524902 ], [ "▁vrea", -11.657846450805664 ], [ "▁wound", -11.657864570617676 ], [ "lac", -11.657971382141113 ], [ "▁purchases", -11.658409118652344 ], [ "▁Cape", -11.65843677520752 ], [ "▁Foto", -11.658537864685059 ], [ "▁acres", -11.65865707397461 ], [ "▁nec", -11.658677101135254 ], [ "▁burning", -11.659050941467285 ], [ "conf", -11.659457206726074 ], [ "▁browse", -11.659486770629883 ], [ "ural", -11.659762382507324 ], [ "▁Ah", -11.659841537475586 ], [ "▁stellt", -11.65992259979248 ], [ "▁ratings", -11.660012245178223 ], [ "▁Bowl", -11.660027503967285 ], [ "▁grav", -11.660289764404297 ], [ "titi", -11.66048526763916 ], [ "▁prêt", -11.66075325012207 ], [ "▁fallen", -11.660818099975586 ], [ "▁nombreuses", -11.660940170288086 ], [ "train", -11.660953521728516 ], [ "ène", -11.661009788513184 ], [ "Aceasta", -11.661091804504395 ], [ "▁drill", -11.661421775817871 ], [ "▁Exam", -11.661477088928223 ], [ "▁Furniture", -11.661651611328125 ], [ "eanu", -11.661919593811035 ], [ "étant", -11.66230297088623 ], [ "sville", -11.662391662597656 ], [ "▁swim", -11.662796020507812 ], [ "▁routes", -11.662826538085938 ], [ "INE", -11.662860870361328 ], [ "▁Por", -11.662976264953613 ], [ "ither", -11.663168907165527 ], [ "▁optim", -11.663180351257324 ], [ "▁lua", -11.66331958770752 ], [ "▁myth", -11.663491249084473 ], [ "▁Bett", -11.6635103225708 ], [ "chim", -11.66355037689209 ], [ "▁cyber", -11.663553237915039 ], [ "▁engineer", -11.663825035095215 ], [ "▁exploration", -11.663918495178223 ], [ "arranged", -11.663973808288574 ], [ "▁aged", -11.663993835449219 ], [ "▁beau", -11.664024353027344 ], [ "OUT", -11.66402530670166 ], [ "▁Minnesota", -11.664031982421875 ], [ "tress", -11.664407730102539 ], [ "▁Commercial", -11.664509773254395 ], [ "▁inspiring", -11.66462516784668 ], [ "▁Mare", -11.664725303649902 ], [ "apa", -11.665140151977539 ], [ "▁ignore", -11.6651611328125 ], [ "▁gros", -11.665186882019043 ], [ "▁measurement", -11.66531753540039 ], [ "ager", -11.665395736694336 ], [ "intele", -11.665966987609863 ], [ "▁suspension", -11.666180610656738 ], [ "▁cultures", -11.666211128234863 ], [ "▁Wow", -11.666231155395508 ], [ "▁pushing", -11.666363716125488 ], [ "▁bands", -11.666438102722168 ], [ "nage", -11.666450500488281 ], [ "▁Math", -11.666515350341797 ], [ "comb", -11.66658878326416 ], [ "▁créer", -11.66658878326416 ], [ "▁Lewis", -11.666685104370117 ], [ "▁VI", -11.66678524017334 ], [ "emploi", -11.666791915893555 ], [ "▁elections", -11.666890144348145 ], [ "▁logic", -11.666982650756836 ], [ "▁unlike", -11.667122840881348 ], [ "▁Matthew", -11.66743278503418 ], [ "▁pă", -11.667486190795898 ], [ "oxy", -11.667620658874512 ], [ "équipe", -11.667717933654785 ], [ "▁worden", -11.668088912963867 ], [ "dev", -11.668258666992188 ], [ "▁Massachusetts", -11.668691635131836 ], [ "▁Return", -11.668695449829102 ], [ "▁Friends", -11.66891098022461 ], [ "▁movements", -11.66894245147705 ], [ "chie", -11.668964385986328 ], [ "rak", -11.669017791748047 ], [ 
"▁Fit", -11.66904354095459 ], [ "▁copil", -11.669113159179688 ], [ "iunii", -11.669188499450684 ], [ "▁intensive", -11.669234275817871 ], [ "▁rug", -11.669452667236328 ], [ "lichkeit", -11.669686317443848 ], [ "kov", -11.669724464416504 ], [ "▁pense", -11.66978645324707 ], [ "pop", -11.66978931427002 ], [ "▁closet", -11.669865608215332 ], [ "▁prevention", -11.669920921325684 ], [ "▁Deb", -11.670256614685059 ], [ "▁devant", -11.670430183410645 ], [ "▁construit", -11.670440673828125 ], [ "▁breaks", -11.67082405090332 ], [ "otic", -11.670886993408203 ], [ "▁dig", -11.67088794708252 ], [ "▁près", -11.670930862426758 ], [ "chte", -11.671029090881348 ], [ "▁Chat", -11.671029090881348 ], [ "wel", -11.671219825744629 ], [ "▁edges", -11.671272277832031 ], [ "▁keen", -11.671419143676758 ], [ "▁infant", -11.671716690063477 ], [ "▁Hills", -11.6719388961792 ], [ "▁grounds", -11.671969413757324 ], [ "▁hab", -11.672039031982422 ], [ "▁Mun", -11.67215347290039 ], [ "▁references", -11.672215461730957 ], [ "▁hearts", -11.672446250915527 ], [ "exprim", -11.672487258911133 ], [ "▁tratament", -11.672553062438965 ], [ "LD", -11.67258358001709 ], [ "ssel", -11.67275333404541 ], [ "cover", -11.672782897949219 ], [ "bridge", -11.672837257385254 ], [ "▁Wein", -11.672924995422363 ], [ "▁voiture", -11.673035621643066 ], [ "▁Gemeinde", -11.67313289642334 ], [ "AI", -11.673169136047363 ], [ "▁renovation", -11.673264503479004 ], [ "bid", -11.673285484313965 ], [ "▁Reading", -11.673481941223145 ], [ "▁Gor", -11.673490524291992 ], [ "fur", -11.673527717590332 ], [ "▁Yoga", -11.673544883728027 ], [ "▁exclusively", -11.673630714416504 ], [ "▁emissions", -11.67385482788086 ], [ "ète", -11.673905372619629 ], [ "▁glasses", -11.674055099487305 ], [ "▁organizat", -11.674135208129883 ], [ "▁washing", -11.67415714263916 ], [ "▁Audi", -11.674173355102539 ], [ "▁Labor", -11.674331665039062 ], [ "▁legacy", -11.674381256103516 ], [ "▁abstract", -11.674519538879395 ], [ "▁knowledgeable", -11.674601554870605 ], [ "▁Glo", -11.674795150756836 ], [ "▁pregnant", -11.67481803894043 ], [ "liter", -11.674851417541504 ], [ "▁paintings", -11.67522144317627 ], [ "▁tête", -11.675244331359863 ], [ "voy", -11.675626754760742 ], [ "▁Jacob", -11.675667762756348 ], [ "▁dressing", -11.675679206848145 ], [ "▁provisions", -11.675768852233887 ], [ "bahn", -11.675870895385742 ], [ "▁depict", -11.675875663757324 ], [ "AW", -11.676068305969238 ], [ "▁bleibt", -11.676163673400879 ], [ "AND", -11.676292419433594 ], [ "▁fünf", -11.676386833190918 ], [ "▁hosts", -11.676426887512207 ], [ "vas", -11.676708221435547 ], [ "DO", -11.67674732208252 ], [ "▁max", -11.676753997802734 ], [ "▁contributed", -11.676774978637695 ], [ "roz", -11.676796913146973 ], [ "▁deschis", -11.676800727844238 ], [ "itaire", -11.676809310913086 ], [ "tube", -11.676959991455078 ], [ "▁Beck", -11.676959991455078 ], [ "▁curious", -11.677130699157715 ], [ "▁waves", -11.677178382873535 ], [ "▁regret", -11.677248001098633 ], [ "FO", -11.677326202392578 ], [ "droit", -11.67734146118164 ], [ "rö", -11.677565574645996 ], [ "▁Panel", -11.677624702453613 ], [ "▁pile", -11.677660942077637 ], [ "▁installing", -11.677674293518066 ], [ "▁Intr", -11.677797317504883 ], [ "nung", -11.677823066711426 ], [ "▁Outdoor", -11.677855491638184 ], [ "▁generator", -11.67786693572998 ], [ "▁zahlreiche", -11.677868843078613 ], [ "▁Third", -11.67813491821289 ], [ "frac", -11.678180694580078 ], [ "ovi", -11.678236961364746 ], [ "▁Casa", -11.678374290466309 ], [ "▁stomach", -11.678393363952637 ], [ "▁Lincoln", 
-11.67844009399414 ], [ "▁Electronic", -11.678584098815918 ], [ "coding", -11.67895221710205 ], [ "2017", -11.67900276184082 ], [ "▁friendship", -11.679238319396973 ], [ "ried", -11.679250717163086 ], [ "но", -11.679265022277832 ], [ "▁tail", -11.679267883300781 ], [ "▁petits", -11.679308891296387 ], [ "▁réseau", -11.679696083068848 ], [ "▁churches", -11.679999351501465 ], [ "▁marketplace", -11.680062294006348 ], [ "▁Pool", -11.680318832397461 ], [ "▁popularity", -11.680455207824707 ], [ "▁sprijin", -11.680496215820312 ], [ "▁Od", -11.680527687072754 ], [ "▁Transfer", -11.680562973022461 ], [ "▁fake", -11.680791854858398 ], [ "▁9,", -11.681007385253906 ], [ "▁weit", -11.681264877319336 ], [ "▁relaxed", -11.681415557861328 ], [ "pig", -11.68161678314209 ], [ "▁Lauren", -11.68166732788086 ], [ "gesetzt", -11.681669235229492 ], [ "▁Clar", -11.681694984436035 ], [ "▁unlikely", -11.681731224060059 ], [ "color", -11.681832313537598 ], [ "▁spouse", -11.681843757629395 ], [ "▁facile", -11.681859970092773 ], [ "▁Speed", -11.681872367858887 ], [ "KE", -11.682230949401855 ], [ "▁PO", -11.68231201171875 ], [ "▁Channel", -11.682321548461914 ], [ "argent", -11.682356834411621 ], [ "▁Making", -11.682430267333984 ], [ "▁Coll", -11.682585716247559 ], [ "cci", -11.682721138000488 ], [ "corresponding", -11.68300724029541 ], [ "▁heaven", -11.683160781860352 ], [ "ţă", -11.68319320678711 ], [ "▁darüber", -11.683236122131348 ], [ "acted", -11.683420181274414 ], [ "only", -11.683460235595703 ], [ "▁slight", -11.683465003967285 ], [ "lian", -11.68348503112793 ], [ "flă", -11.683510780334473 ], [ "▁vulnerable", -11.683530807495117 ], [ "▁creator", -11.68356704711914 ], [ "▁protecting", -11.68360424041748 ], [ "writing", -11.68360710144043 ], [ "▁Ter", -11.68387222290039 ], [ "▁barb", -11.683987617492676 ], [ "▁dată", -11.683995246887207 ], [ "▁Screen", -11.684052467346191 ], [ "▁BBC", -11.684082984924316 ], [ "Col", -11.684206008911133 ], [ "fung", -11.684453964233398 ], [ "▁dreptul", -11.684494972229004 ], [ "derived", -11.684538841247559 ], [ "▁designated", -11.684553146362305 ], [ "▁interactions", -11.684617042541504 ], [ "SG", -11.684621810913086 ], [ "▁häufig", -11.684625625610352 ], [ "▁Mega", -11.684638023376465 ], [ "▁jazz", -11.684660911560059 ], [ "lbs", -11.684797286987305 ], [ "▁Manual", -11.68484115600586 ], [ "pushed", -11.685017585754395 ], [ "▁analytics", -11.685234069824219 ], [ "▁lawsuit", -11.68533706665039 ], [ "▁gray", -11.685364723205566 ], [ "shirts", -11.685401916503906 ], [ "▁hill", -11.685508728027344 ], [ "▁1991", -11.68550968170166 ], [ "▁obligations", -11.685568809509277 ], [ "▁Dubai", -11.68580436706543 ], [ "()", -11.685808181762695 ], [ "▁acceptable", -11.685810089111328 ], [ "therapist", -11.685877799987793 ], [ "inger", -11.6860990524292 ], [ "▁territory", -11.686208724975586 ], [ "▁sang", -11.6862211227417 ], [ "ät", -11.686224937438965 ], [ "▁Zukunft", -11.686238288879395 ], [ "TU", -11.68657398223877 ], [ "▁horizontal", -11.68665599822998 ], [ "▁entrepreneurs", -11.686710357666016 ], [ "▁Eltern", -11.687017440795898 ], [ "▁presentations", -11.687129974365234 ], [ "▁confirmation", -11.687173843383789 ], [ "▁technological", -11.687432289123535 ], [ "▁1989", -11.687530517578125 ], [ "EF", -11.687640190124512 ], [ "ponent", -11.687663078308105 ], [ "NET", -11.687699317932129 ], [ "750", -11.687772750854492 ], [ "▁desert", -11.687891960144043 ], [ "▁contribu", -11.687932968139648 ], [ "▁Gun", -11.687944412231445 ], [ "▁Juli", -11.688091278076172 ], [ "ERS", -11.688261985778809 ], [ 
"▁inceput", -11.688261985778809 ], [ "▁answered", -11.688369750976562 ], [ "▁basement", -11.688410758972168 ], [ "film", -11.688434600830078 ], [ "▁taille", -11.688593864440918 ], [ "▁survival", -11.688655853271484 ], [ "ihnen", -11.68869400024414 ], [ "▁Bird", -11.688840866088867 ], [ "speed", -11.689336776733398 ], [ "▁journalist", -11.68941879272461 ], [ "▁Indonesia", -11.689626693725586 ], [ "▁15.", -11.689973831176758 ], [ "▁19.", -11.690025329589844 ], [ "étaient", -11.690114974975586 ], [ "▁tennis", -11.69024658203125 ], [ "▁aproximativ", -11.69039249420166 ], [ "▁Hans", -11.690650939941406 ], [ "▁Remove", -11.69067096710205 ], [ "▁cats", -11.691022872924805 ], [ "▁calories", -11.691052436828613 ], [ "▁limitations", -11.69119644165039 ], [ "▁subscribe", -11.691198348999023 ], [ "▁Dem", -11.691339492797852 ], [ "lust", -11.691370010375977 ], [ "▁adresa", -11.691394805908203 ], [ "▁sais", -11.69140911102295 ], [ "...\"", -11.691473960876465 ], [ "▁Luft", -11.691485404968262 ], [ "DL", -11.691597938537598 ], [ "▁estimates", -11.691600799560547 ], [ "▁protocol", -11.691603660583496 ], [ "▁Namen", -11.691776275634766 ], [ "▁grands", -11.691901206970215 ], [ "▁voter", -11.691970825195312 ], [ "▁vacuum", -11.692075729370117 ], [ "▁versch", -11.692103385925293 ], [ "▁Democratic", -11.692107200622559 ], [ "▁Books", -11.692170143127441 ], [ "▁frames", -11.692727088928223 ], [ "▁Bee", -11.692864418029785 ], [ "▁helfen", -11.692934036254883 ], [ "▁dive", -11.692963600158691 ], [ "▁physician", -11.693037033081055 ], [ "▁powered", -11.693131446838379 ], [ "▁zones", -11.693337440490723 ], [ "▁regime", -11.69345474243164 ], [ "check", -11.693578720092773 ], [ "11.", -11.693793296813965 ], [ "▁plaisir", -11.693793296813965 ], [ "▁physically", -11.693811416625977 ], [ "▁Pul", -11.694245338439941 ], [ "▁jardin", -11.694294929504395 ], [ "▁Nur", -11.694417953491211 ], [ "WC", -11.694425582885742 ], [ "▁Lock", -11.694506645202637 ], [ "▁économique", -11.694530487060547 ], [ "user", -11.694536209106445 ], [ "▁commit", -11.694731712341309 ], [ "▁oldest", -11.694764137268066 ], [ "▁fulfill", -11.694780349731445 ], [ "▁nervous", -11.69482135772705 ], [ "▁SH", -11.695014953613281 ], [ "SK", -11.695150375366211 ], [ "▁plein", -11.695291519165039 ], [ "show", -11.695354461669922 ], [ "▁disability", -11.695356369018555 ], [ "papier", -11.69544506072998 ], [ "▁Corp", -11.695611000061035 ], [ "ători", -11.695676803588867 ], [ "nţă", -11.695813179016113 ], [ "▁overseas", -11.696009635925293 ], [ "▁struck", -11.69603157043457 ], [ "astic", -11.69607162475586 ], [ "▁advised", -11.696088790893555 ], [ "BE", -11.696161270141602 ], [ "▁UV", -11.696218490600586 ], [ "patient", -11.69626235961914 ], [ "▁texte", -11.696344375610352 ], [ "▁timely", -11.696444511413574 ], [ "used", -11.696471214294434 ], [ "▁occasionally", -11.696524620056152 ], [ "▁entries", -11.696550369262695 ], [ "underlying", -11.6967191696167 ], [ "01.", -11.696748733520508 ], [ "▁automated", -11.696791648864746 ], [ "yes", -11.696828842163086 ], [ "▁Staff", -11.697057723999023 ], [ "▁Einzel", -11.697546005249023 ], [ "quit", -11.697687149047852 ], [ "▁Cela", -11.697951316833496 ], [ "▁snap", -11.698298454284668 ], [ "▁followers", -11.698330879211426 ], [ "CN", -11.698709487915039 ], [ "▁Cooper", -11.698892593383789 ], [ "ô", -11.698921203613281 ], [ "▁memorable", -11.698965072631836 ], [ "▁jur", -11.698996543884277 ], [ "▁ajutorul", -11.69905948638916 ], [ "▁Enter", -11.6991548538208 ], [ "Often", -11.699294090270996 ], [ "▁dintr", 
-11.699341773986816 ], [ "-30", -11.699419975280762 ], [ "ESS", -11.699454307556152 ], [ "▁weird", -11.699462890625 ], [ "▁Animal", -11.699706077575684 ], [ "▁complement", -11.699719429016113 ], [ "▁Bot", -11.699756622314453 ], [ "▁darf", -11.699764251708984 ], [ "yed", -11.699808120727539 ], [ "▁Mul", -11.699872016906738 ], [ "lick", -11.700080871582031 ], [ "▁Cambridge", -11.700216293334961 ], [ "adore", -11.700407981872559 ], [ "▁Dutch", -11.700420379638672 ], [ "▁Castle", -11.700431823730469 ], [ "igi", -11.700563430786133 ], [ "▁enemy", -11.70071029663086 ], [ "accompanied", -11.700725555419922 ], [ "▁teren", -11.701102256774902 ], [ "▁ET", -11.701498985290527 ], [ "ffle", -11.701557159423828 ], [ "-15", -11.701651573181152 ], [ "▁Geo", -11.701680183410645 ], [ "▁attractions", -11.701730728149414 ], [ "iker", -11.70185661315918 ], [ "▁bă", -11.701990127563477 ], [ "▁heal", -11.701995849609375 ], [ "weisen", -11.702144622802734 ], [ "▁spectrum", -11.702186584472656 ], [ "meld", -11.702394485473633 ], [ "▁eveniment", -11.70247745513916 ], [ "arra", -11.702478408813477 ], [ "rete", -11.70250129699707 ], [ "▁Had", -11.70250415802002 ], [ "looking", -11.702692031860352 ], [ "isierung", -11.702805519104004 ], [ "▁moyen", -11.703129768371582 ], [ "▁gesamte", -11.703202247619629 ], [ "▁destroy", -11.703407287597656 ], [ "125", -11.703518867492676 ], [ "▁suivant", -11.703913688659668 ], [ "▁declared", -11.703925132751465 ], [ "▁Urban", -11.704131126403809 ], [ "▁16.", -11.704168319702148 ], [ "▁Beg", -11.704168319702148 ], [ "▁canal", -11.704225540161133 ], [ "▁Pres", -11.70431137084961 ], [ "▁geeignet", -11.704339981079102 ], [ "▁strat", -11.704365730285645 ], [ "UB", -11.704395294189453 ], [ "▁Alexander", -11.704424858093262 ], [ "cycle", -11.704666137695312 ], [ "▁Var", -11.704802513122559 ], [ "▁domin", -11.704805374145508 ], [ "▁lasting", -11.704939842224121 ], [ "terio", -11.705262184143066 ], [ "▁Battle", -11.705339431762695 ], [ "▁publications", -11.705647468566895 ], [ "▁implica", -11.705886840820312 ], [ "▁NA", -11.705963134765625 ], [ "▁stocks", -11.706036567687988 ], [ "Plat", -11.70611572265625 ], [ "▁excitement", -11.706149101257324 ], [ "▁Muslim", -11.706524848937988 ], [ "▁Mari", -11.706530570983887 ], [ "▁Ul", -11.706647872924805 ], [ "nächst", -11.706757545471191 ], [ "▁trait", -11.706833839416504 ], [ "▁(3)", -11.706852912902832 ], [ "▁Attorney", -11.706894874572754 ], [ "▁Malaysia", -11.70689582824707 ], [ "▁slab", -11.706960678100586 ], [ "▁dam", -11.707113265991211 ], [ "▁Bir", -11.707226753234863 ], [ "▁sing", -11.70738410949707 ], [ "▁Culture", -11.7073974609375 ], [ "UD", -11.707417488098145 ], [ "▁Mes", -11.707443237304688 ], [ "ități", -11.707615852355957 ], [ "▁possess", -11.708173751831055 ], [ "enabling", -11.70820426940918 ], [ "▁settled", -11.708335876464844 ], [ "▁sagen", -11.708492279052734 ], [ "▁erfolgt", -11.708564758300781 ], [ "dog", -11.708600997924805 ], [ "ndu", -11.708732604980469 ], [ "ității", -11.708745002746582 ], [ "▁Islam", -11.708930015563965 ], [ "▁catalog", -11.708931922912598 ], [ "▁simt", -11.709102630615234 ], [ "tische", -11.709150314331055 ], [ "▁Mach", -11.709334373474121 ], [ "▁EP", -11.709359169006348 ], [ "▁Certified", -11.709386825561523 ], [ "▁Resources", -11.70945930480957 ], [ "▁Past", -11.709607124328613 ], [ "▁Termin", -11.709755897521973 ], [ "▁lightweight", -11.709755897521973 ], [ "▁championship", -11.70994758605957 ], [ "gebiet", -11.710122108459473 ], [ "▁jurisdiction", -11.710135459899902 ], [ "▁euros", 
-11.710169792175293 ], [ "▁Familien", -11.710554122924805 ], [ "▁GT", -11.710677146911621 ], [ "▁dvs", -11.71081256866455 ], [ "▁nouveaux", -11.710838317871094 ], [ "▁chill", -11.710916519165039 ], [ "▁ridicat", -11.710920333862305 ], [ "his", -11.711079597473145 ], [ "▁Indi", -11.711159706115723 ], [ "▁arrested", -11.71116828918457 ], [ "ităţii", -11.711170196533203 ], [ "onul", -11.711274147033691 ], [ "appar", -11.711296081542969 ], [ "▁Bachelor", -11.711297988891602 ], [ "▁erfolgreich", -11.711426734924316 ], [ "▁versatile", -11.71163558959961 ], [ "▁nécessaire", -11.711761474609375 ], [ "▁facial", -11.712160110473633 ], [ "▁Bull", -11.712226867675781 ], [ "Comm", -11.712237358093262 ], [ "atte", -11.712307929992676 ], [ "hom", -11.7123384475708 ], [ "start", -11.712576866149902 ], [ "▁roughly", -11.712936401367188 ], [ "▁bay", -11.712984085083008 ], [ "▁american", -11.712986946105957 ], [ "▁Wisconsin", -11.713135719299316 ], [ "▁Clinton", -11.713142395019531 ], [ "appareil", -11.713153839111328 ], [ "▁liberal", -11.713455200195312 ], [ "▁dau", -11.713519096374512 ], [ "ech", -11.713521957397461 ], [ "2014", -11.713624000549316 ], [ "▁lip", -11.713645935058594 ], [ "▁maintenant", -11.713762283325195 ], [ "▁Sil", -11.713805198669434 ], [ "rben", -11.713891983032227 ], [ "▁contents", -11.713980674743652 ], [ "▁magnetic", -11.714111328125 ], [ "▁terre", -11.714151382446289 ], [ "▁Rights", -11.714475631713867 ], [ "lose", -11.714570045471191 ], [ "▁crown", -11.71468448638916 ], [ "▁oils", -11.7147216796875 ], [ "▁entertaining", -11.714841842651367 ], [ "▁Option", -11.714848518371582 ], [ "▁Previous", -11.714916229248047 ], [ "▁vrai", -11.714930534362793 ], [ "▁Auswahl", -11.715056419372559 ], [ "▁horses", -11.715106010437012 ], [ "▁Author", -11.71533489227295 ], [ "▁Writing", -11.715461730957031 ], [ "▁travelling", -11.715522766113281 ], [ "▁350", -11.715567588806152 ], [ "daten", -11.71560287475586 ], [ "zan", -11.715765953063965 ], [ "▁sweat", -11.715924263000488 ], [ "▁Junior", -11.715970993041992 ], [ "markt", -11.71609878540039 ], [ "after", -11.716105461120605 ], [ "▁admitted", -11.716262817382812 ], [ "▁1950", -11.716347694396973 ], [ "▁Sche", -11.71648120880127 ], [ "▁dorit", -11.716818809509277 ], [ "▁transferred", -11.716958045959473 ], [ "utilise", -11.717194557189941 ], [ "sitz", -11.717301368713379 ], [ "gio", -11.717320442199707 ], [ "▁bisher", -11.717473983764648 ], [ "RD", -11.717491149902344 ], [ "▁Wales", -11.717747688293457 ], [ "▁smoking", -11.717904090881348 ], [ "dire", -11.717939376831055 ], [ "▁seating", -11.717979431152344 ], [ "▁constat", -11.718056678771973 ], [ "▁Hub", -11.718324661254883 ], [ "▁sieht", -11.718345642089844 ], [ "▁prospect", -11.718378067016602 ], [ "▁RO", -11.718413352966309 ], [ "▁Wars", -11.718423843383789 ], [ "eek", -11.718496322631836 ], [ "▁Bring", -11.718646049499512 ], [ "▁bleiben", -11.718696594238281 ], [ "arri", -11.718826293945312 ], [ "inal", -11.718904495239258 ], [ "▁Maryland", -11.718932151794434 ], [ "▁Process", -11.719145774841309 ], [ "They", -11.719154357910156 ], [ "▁Oxford", -11.719176292419434 ], [ "▁neat", -11.719330787658691 ], [ "▁cinema", -11.719597816467285 ], [ "▁Ist", -11.719620704650879 ], [ "▁vegan", -11.719682693481445 ], [ "wall", -11.719708442687988 ], [ "▁motive", -11.72010612487793 ], [ "▁mature", -11.720544815063477 ], [ "▁Dragon", -11.720653533935547 ], [ "▁google", -11.720677375793457 ], [ "blick", -11.72110652923584 ], [ "▁Cod", -11.721220970153809 ], [ "▁suffi", -11.721319198608398 ], [ "▁terrorist", 
-11.721478462219238 ], [ "Posted", -11.721484184265137 ], [ "▁Schi", -11.72157096862793 ], [ "▁Marc", -11.721597671508789 ], [ "▁operates", -11.721661567687988 ], [ "gress", -11.721805572509766 ], [ "has", -11.721899032592773 ], [ "sole", -11.722108840942383 ], [ "▁Buck", -11.722122192382812 ], [ "impl", -11.722160339355469 ], [ "▁Ron", -11.722172737121582 ], [ "▁handled", -11.722346305847168 ], [ "▁Apr", -11.722347259521484 ], [ "▁Storage", -11.722467422485352 ], [ "▁temp", -11.722512245178223 ], [ "▁differently", -11.722614288330078 ], [ "▁wherever", -11.722670555114746 ], [ "matched", -11.722695350646973 ], [ "rios", -11.72276496887207 ], [ "▁surprising", -11.722846031188965 ], [ "teilen", -11.722867965698242 ], [ "▁difficulties", -11.72294807434082 ], [ "tab", -11.723064422607422 ], [ "▁Leader", -11.723128318786621 ], [ "implementing", -11.723372459411621 ], [ "▁workforce", -11.723384857177734 ], [ "▁bereit", -11.723503112792969 ], [ "vig", -11.72352123260498 ], [ "▁LOVE", -11.723580360412598 ], [ "▁instances", -11.723954200744629 ], [ "▁frumos", -11.723960876464844 ], [ "▁Java", -11.723974227905273 ], [ "▁arrest", -11.723977088928223 ], [ "▁apparent", -11.724152565002441 ], [ "▁hence", -11.724200248718262 ], [ "▁entwickelt", -11.72437572479248 ], [ "▁Fra", -11.724471092224121 ], [ "▁prend", -11.724486351013184 ], [ "ließ", -11.724522590637207 ], [ "▁drawer", -11.724671363830566 ], [ "ARD", -11.724926948547363 ], [ "▁caring", -11.72499942779541 ], [ "▁wollte", -11.725024223327637 ], [ "▁vielleicht", -11.72511100769043 ], [ "▁iconic", -11.725324630737305 ], [ "äch", -11.72552490234375 ], [ "abel", -11.725639343261719 ], [ "▁génér", -11.72570514678955 ], [ "ault", -11.725727081298828 ], [ "▁alternatives", -11.725909233093262 ], [ "think", -11.726025581359863 ], [ "ро", -11.726055145263672 ], [ "whereas", -11.726058006286621 ], [ "erei", -11.726366996765137 ], [ "▁Eagle", -11.726766586303711 ], [ "situé", -11.72704792022705 ], [ "▁laboratory", -11.727157592773438 ], [ "▁Nutzung", -11.727256774902344 ], [ "▁Bathroom", -11.72728157043457 ], [ "▁loaded", -11.727293968200684 ], [ "niste", -11.727408409118652 ], [ "som", -11.727429389953613 ], [ "▁aucun", -11.727666854858398 ], [ "gebracht", -11.727676391601562 ], [ "▁tomb", -11.727771759033203 ], [ "▁Ty", -11.727785110473633 ], [ "▁afaceri", -11.727971076965332 ], [ "tex", -11.72803783416748 ], [ "ality", -11.728147506713867 ], [ "▁identification", -11.728150367736816 ], [ "▁cultiv", -11.728255271911621 ], [ "Not", -11.728326797485352 ], [ "▁acestor", -11.72846508026123 ], [ "▁PhD", -11.728466033935547 ], [ "nell", -11.728470802307129 ], [ "▁dial", -11.728594779968262 ], [ "chro", -11.728673934936523 ], [ "▁specifications", -11.728682518005371 ], [ "anii", -11.72877025604248 ], [ "▁cloth", -11.728836059570312 ], [ "▁highway", -11.728914260864258 ], [ "▁Vitamin", -11.729118347167969 ], [ "▁indication", -11.729349136352539 ], [ "80%", -11.72959041595459 ], [ "▁Lion", -11.729681015014648 ], [ "▁10,", -11.729693412780762 ], [ "▁Werk", -11.72974967956543 ], [ "▁combin", -11.729803085327148 ], [ "▁releases", -11.7298583984375 ], [ "LL", -11.730006217956543 ], [ "ktor", -11.730186462402344 ], [ "ufgrund", -11.73018741607666 ], [ "calc", -11.73034381866455 ], [ "▁accomplished", -11.730606079101562 ], [ "▁los", -11.730619430541992 ], [ "▁distant", -11.730688095092773 ], [ "▁secteur", -11.73068904876709 ], [ "logue", -11.730781555175781 ], [ "▁betting", -11.730792999267578 ], [ "elf", -11.731180191040039 ], [ "puteti", -11.73123550415039 ], [ 
"▁Moment", -11.731236457824707 ], [ "▁scoring", -11.731548309326172 ], [ "▁freuen", -11.731572151184082 ], [ "▁fastest", -11.731873512268066 ], [ "▁directors", -11.732080459594727 ], [ "▁fame", -11.732234954833984 ], [ "▁complaint", -11.732239723205566 ], [ "▁Ep", -11.732314109802246 ], [ "▁delicate", -11.732329368591309 ], [ "annonce", -11.73240852355957 ], [ "ext", -11.732454299926758 ], [ "▁quit", -11.732473373413086 ], [ "▁Cop", -11.73253345489502 ], [ "prop", -11.732565879821777 ], [ "365", -11.732742309570312 ], [ "▁Say", -11.732879638671875 ], [ "▁internationale", -11.733064651489258 ], [ "cott", -11.733213424682617 ], [ "▁Whatever", -11.733261108398438 ], [ "▁admir", -11.733261108398438 ], [ "▁bucur", -11.733549118041992 ], [ "▁entity", -11.733779907226562 ], [ "▁dancing", -11.733837127685547 ], [ "▁printre", -11.733892440795898 ], [ "▁meditation", -11.734396934509277 ], [ "▁avis", -11.734416961669922 ], [ "▁1988", -11.73447036743164 ], [ "10.", -11.734506607055664 ], [ "▁worker", -11.734638214111328 ], [ "▁$100", -11.734784126281738 ], [ "▁contrôle", -11.7349853515625 ], [ "▁insist", -11.734997749328613 ], [ "ements", -11.73505973815918 ], [ "izate", -11.735163688659668 ], [ "▁tied", -11.735332489013672 ], [ "▁correspond", -11.735396385192871 ], [ "▁apartments", -11.735547065734863 ], [ "▁2009.", -11.735599517822266 ], [ "▁tiles", -11.735624313354492 ], [ "▁boots", -11.735639572143555 ], [ "▁laundry", -11.735673904418945 ], [ "▁Coffee", -11.735674858093262 ], [ "▁CV", -11.735727310180664 ], [ "▁composed", -11.736035346984863 ], [ "atom", -11.73622989654541 ], [ "▁shore", -11.736270904541016 ], [ "▁marijuana", -11.736312866210938 ], [ "plic", -11.73648452758789 ], [ "▁Zahl", -11.736649513244629 ], [ "depth", -11.73682689666748 ], [ "▁Egypt", -11.736854553222656 ], [ "▁NFL", -11.736906051635742 ], [ "▁12,", -11.736922264099121 ], [ "▁pollution", -11.736964225769043 ], [ "▁Vergleich", -11.73704719543457 ], [ "û", -11.737109184265137 ], [ "▁nurse", -11.737153053283691 ], [ "▁Susan", -11.737173080444336 ], [ "▁verify", -11.737393379211426 ], [ "▁kon", -11.737504959106445 ], [ "▁ulei", -11.7376127243042 ], [ "▁Sept", -11.737699508666992 ], [ "▁Location", -11.737908363342285 ], [ "▁frozen", -11.737991333007812 ], [ "good", -11.73802661895752 ], [ "▁cine", -11.738066673278809 ], [ "forming", -11.738181114196777 ], [ "▁Near", -11.738391876220703 ], [ "▁Tab", -11.738545417785645 ], [ "▁Alexandr", -11.738600730895996 ], [ "ст", -11.73863697052002 ], [ "CK", -11.738656044006348 ], [ "▁loads", -11.738948822021484 ], [ "▁disorders", -11.738957405090332 ], [ "hip", -11.739596366882324 ], [ "▁blessing", -11.73987102508545 ], [ "▁vechi", -11.73997688293457 ], [ "▁Bookmark", -11.740296363830566 ], [ "SON", -11.74036979675293 ], [ "books", -11.740428924560547 ], [ "▁tropical", -11.740438461303711 ], [ "▁Garten", -11.740447044372559 ], [ "ôt", -11.740760803222656 ], [ "tures", -11.740827560424805 ], [ "▁obligation", -11.741010665893555 ], [ "▁admin", -11.741011619567871 ], [ "▁sélection", -11.741106986999512 ], [ "disp", -11.741172790527344 ], [ "▁Anyone", -11.741225242614746 ], [ "keeper", -11.74138355255127 ], [ "▁konnten", -11.741521835327148 ], [ "▁existe", -11.741615295410156 ], [ "▁Rund", -11.741798400878906 ], [ "▁retailers", -11.74184799194336 ], [ "folg", -11.741948127746582 ], [ "▁urmare", -11.742019653320312 ], [ "▁Liebe", -11.742321014404297 ], [ "▁actors", -11.742422103881836 ], [ "▁Druck", -11.742618560791016 ], [ "lien", -11.742752075195312 ], [ "sian", -11.742847442626953 ], [ 
"▁partid", -11.74304485321045 ], [ "▁loin", -11.743114471435547 ], [ "AZ", -11.743119239807129 ], [ "oasă", -11.743501663208008 ], [ "▁inclusiv", -11.743656158447266 ], [ "TD", -11.743680953979492 ], [ "▁anului", -11.743766784667969 ], [ "poc", -11.743844985961914 ], [ "▁musique", -11.743972778320312 ], [ "▁Hart", -11.743997573852539 ], [ "Sh", -11.744283676147461 ], [ "html", -11.744290351867676 ], [ "▁serial", -11.744318008422852 ], [ "țele", -11.744369506835938 ], [ "inning", -11.744544982910156 ], [ "▁Bureau", -11.744555473327637 ], [ "▁rush", -11.744626998901367 ], [ "▁deosebit", -11.744637489318848 ], [ "▁Wort", -11.744648933410645 ], [ "▁Thailand", -11.744688987731934 ], [ "▁Language", -11.745193481445312 ], [ "▁Governor", -11.745213508605957 ], [ "▁Later", -11.74525260925293 ], [ "rilor", -11.745282173156738 ], [ "▁activités", -11.745372772216797 ], [ "schaffen", -11.745598793029785 ], [ "▁harvest", -11.74567985534668 ], [ "▁municipal", -11.745783805847168 ], [ "einander", -11.74600601196289 ], [ "▁fingers", -11.746383666992188 ], [ "▁sculpture", -11.74638843536377 ], [ "▁Bien", -11.746390342712402 ], [ "▁departments", -11.746562957763672 ], [ "▁période", -11.746746063232422 ], [ "▁jeune", -11.746960639953613 ], [ "▁governments", -11.74710750579834 ], [ "uter", -11.747179985046387 ], [ "Aceste", -11.747220039367676 ], [ "▁Deal", -11.747243881225586 ], [ "▁Equipment", -11.74726390838623 ], [ "nous", -11.747300148010254 ], [ "▁gate", -11.747315406799316 ], [ "▁meta", -11.747447967529297 ], [ "▁stiu", -11.747474670410156 ], [ "fold", -11.747486114501953 ], [ "▁seule", -11.747523307800293 ], [ "▁varied", -11.747541427612305 ], [ "hit", -11.747635841369629 ], [ "▁DIY", -11.74768352508545 ], [ "▁lemn", -11.747685432434082 ], [ "OB", -11.747865676879883 ], [ "▁colorful", -11.748095512390137 ], [ "▁câ", -11.74826431274414 ], [ "▁semester", -11.74830150604248 ], [ "▁dealer", -11.748575210571289 ], [ "nett", -11.748788833618164 ], [ "▁shortly", -11.748932838439941 ], [ "▁Driver", -11.748983383178711 ], [ "culture", -11.749052047729492 ], [ "▁permitted", -11.749072074890137 ], [ "▁sorts", -11.749432563781738 ], [ "▁crop", -11.74999713897705 ], [ "▁valoare", -11.75046157836914 ], [ "▁analog", -11.750576972961426 ], [ "▁excuse", -11.750588417053223 ], [ "▁modèle", -11.750657081604004 ], [ "When", -11.75068473815918 ], [ "▁march", -11.750744819641113 ], [ "haz", -11.750978469848633 ], [ "▁minimize", -11.750992774963379 ], [ "traction", -11.751028060913086 ], [ "▁caracter", -11.752382278442383 ], [ "▁modules", -11.7523832321167 ], [ "clu", -11.75244426727295 ], [ "ţional", -11.752482414245605 ], [ "▁breach", -11.752562522888184 ], [ "▁priced", -11.752614974975586 ], [ "▁attorneys", -11.752644538879395 ], [ "▁implant", -11.752645492553711 ], [ "▁ANY", -11.752655029296875 ], [ "dition", -11.752707481384277 ], [ "▁trials", -11.752838134765625 ], [ "▁Nas", -11.75293254852295 ], [ "Pre", -11.752970695495605 ], [ "lorsque", -11.752979278564453 ], [ "plin", -11.753050804138184 ], [ "Er", -11.753056526184082 ], [ "▁Dom", -11.753067970275879 ], [ "▁tire", -11.753190040588379 ], [ "sili", -11.753233909606934 ], [ "▁coins", -11.753350257873535 ], [ "▁rend", -11.753470420837402 ], [ "▁reliability", -11.753503799438477 ], [ "▁Analysis", -11.753508567810059 ], [ "▁trails", -11.753692626953125 ], [ "trägt", -11.753762245178223 ], [ "▁Kansas", -11.753908157348633 ], [ "▁responsive", -11.75390911102295 ], [ "▁disappear", -11.753988265991211 ], [ "▁stakeholders", -11.754022598266602 ], [ "▁aplica", 
-11.754164695739746 ], [ "▁imi", -11.754180908203125 ], [ "▁Laura", -11.754369735717773 ], [ "▁Terms", -11.75440788269043 ], [ "450", -11.754460334777832 ], [ "▁voltage", -11.754483222961426 ], [ "▁Gel", -11.754544258117676 ], [ "▁qualities", -11.754549026489258 ], [ "▁qualifi", -11.754603385925293 ], [ "▁Mé", -11.754735946655273 ], [ "bereit", -11.754829406738281 ], [ "gleich", -11.754875183105469 ], [ "▁voting", -11.754961013793945 ], [ "▁trademark", -11.755128860473633 ], [ "▁2.5", -11.75515079498291 ], [ "ND", -11.755438804626465 ], [ "▁Kelly", -11.755470275878906 ], [ "▁weiteren", -11.755559921264648 ], [ "▁filters", -11.75562572479248 ], [ "▁coût", -11.75562858581543 ], [ "jur", -11.755765914916992 ], [ "acre", -11.755804061889648 ], [ "▁retired", -11.756022453308105 ], [ "▁Engine", -11.756205558776855 ], [ "▁président", -11.756264686584473 ], [ "ajul", -11.756307601928711 ], [ "▁GA", -11.756425857543945 ], [ "rät", -11.75666332244873 ], [ "▁instructor", -11.756669998168945 ], [ "▁Allen", -11.75668716430664 ], [ "▁Delhi", -11.756771087646484 ], [ "▁cure", -11.756844520568848 ], [ "seite", -11.756898880004883 ], [ "coming", -11.756914138793945 ], [ "▁mixing", -11.756963729858398 ], [ "▁Kno", -11.757041931152344 ], [ "▁Sure", -11.757079124450684 ], [ "▁hired", -11.757102012634277 ], [ "▁participated", -11.757196426391602 ], [ "Count", -11.757320404052734 ], [ "treffen", -11.757355690002441 ], [ "▁54", -11.75735855102539 ], [ "▁rings", -11.75735855102539 ], [ "▁Thor", -11.757359504699707 ], [ "éro", -11.75744915008545 ], [ "▁buttons", -11.757488250732422 ], [ "▁47", -11.757539749145508 ], [ "▁Tel", -11.757694244384766 ], [ "▁suport", -11.757776260375977 ], [ "▁rhythm", -11.75782585144043 ], [ "▁Theater", -11.758113861083984 ], [ "▁informatii", -11.758121490478516 ], [ "hält", -11.758201599121094 ], [ "▁ouvert", -11.758238792419434 ], [ "fewer", -11.75828742980957 ], [ "▁alumni", -11.758466720581055 ], [ "▁valley", -11.758508682250977 ], [ "tial", -11.75860595703125 ], [ "***", -11.758782386779785 ], [ "kri", -11.75905704498291 ], [ "▁accidents", -11.759113311767578 ], [ "▁barrel", -11.759170532226562 ], [ "mobil", -11.759310722351074 ], [ "etti", -11.759437561035156 ], [ "▁immigration", -11.759515762329102 ], [ "▁poveste", -11.759528160095215 ], [ "hren", -11.759669303894043 ], [ "hydr", -11.759719848632812 ], [ "▁tweet", -11.759744644165039 ], [ "▁zip", -11.759872436523438 ], [ "▁Bonus", -11.760189056396484 ], [ "ordnung", -11.760287284851074 ], [ "liber", -11.76046085357666 ], [ "▁Navy", -11.760591506958008 ], [ "▁agreements", -11.760612487792969 ], [ "▁detection", -11.7607421875 ], [ "DF", -11.760762214660645 ], [ "hur", -11.760774612426758 ], [ "0.00", -11.760798454284668 ], [ "▁07", -11.760866165161133 ], [ "etta", -11.760884284973145 ], [ "▁13,", -11.760887145996094 ], [ "rolled", -11.760970115661621 ], [ "▁injection", -11.761002540588379 ], [ "mig", -11.761017799377441 ], [ "wach", -11.761107444763184 ], [ "▁choisir", -11.761515617370605 ], [ "▁professionnels", -11.76159954071045 ], [ "▁Tower", -11.76169490814209 ], [ "▁neighbor", -11.76170539855957 ], [ "deutschen", -11.76187801361084 ], [ "▁luxurious", -11.76201057434082 ], [ "▁walks", -11.762033462524414 ], [ "reti", -11.762046813964844 ], [ "▁Pad", -11.762085914611816 ], [ "wise", -11.762297630310059 ], [ "▁exhaust", -11.762307167053223 ], [ "▁demonstration", -11.762582778930664 ], [ "▁agricultural", -11.762667655944824 ], [ "Upon", -11.762885093688965 ], [ "▁Blu", -11.76292610168457 ], [ "atorul", -11.762967109680176 ], [ 
"amour", -11.762984275817871 ], [ "issant", -11.763004302978516 ], [ "▁delighted", -11.763031959533691 ], [ "rita", -11.763113021850586 ], [ "requiring", -11.763195037841797 ], [ "ivity", -11.763216972351074 ], [ "▁Unser", -11.763306617736816 ], [ "FP", -11.763379096984863 ], [ "fait", -11.763533592224121 ], [ "dite", -11.763562202453613 ], [ "kul", -11.763716697692871 ], [ "arth", -11.76376724243164 ], [ "▁Ker", -11.763815879821777 ], [ "torilor", -11.763816833496094 ], [ "stage", -11.763866424560547 ], [ "▁HTML", -11.76398754119873 ], [ "▁Wheel", -11.764005661010742 ], [ "▁quelque", -11.76414680480957 ], [ "▁Ou", -11.764196395874023 ], [ "▁considerable", -11.764277458190918 ], [ "▁Sco", -11.76458740234375 ], [ "▁donations", -11.76481819152832 ], [ "dessen", -11.765002250671387 ], [ "▁pourquoi", -11.765039443969727 ], [ "▁Bow", -11.765189170837402 ], [ "▁Dupa", -11.76522445678711 ], [ "ska", -11.765707015991211 ], [ "hot", -11.765732765197754 ], [ "▁drove", -11.765849113464355 ], [ "▁oppos", -11.766018867492676 ], [ "▁hiking", -11.766035079956055 ], [ "▁Boot", -11.766081809997559 ], [ "One", -11.766087532043457 ], [ "▁guvern", -11.766094207763672 ], [ "▁15,", -11.766400337219238 ], [ "scheid", -11.766437530517578 ], [ "▁Miet", -11.766458511352539 ], [ "▁Technical", -11.766767501831055 ], [ "▁Dal", -11.7669038772583 ], [ "▁Metro", -11.766966819763184 ], [ "▁Baker", -11.767215728759766 ], [ "▁trece", -11.767252922058105 ], [ "tained", -11.767302513122559 ], [ "block", -11.76738452911377 ], [ "▁wander", -11.767401695251465 ], [ "▁penalty", -11.76742172241211 ], [ "▁shipped", -11.767509460449219 ], [ "▁30%", -11.767518043518066 ], [ "group", -11.767541885375977 ], [ "▁brothers", -11.767701148986816 ], [ "▁comanda", -11.767777442932129 ], [ "▁retreat", -11.767789840698242 ], [ "▁Movie", -11.767802238464355 ], [ "PU", -11.76787281036377 ], [ "▁Jun", -11.767885208129883 ], [ "▁$6", -11.767969131469727 ], [ "▁Fal", -11.768054962158203 ], [ "▁Palestinian", -11.768075942993164 ], [ "▁soccer", -11.768217086791992 ], [ "▁Autor", -11.768254280090332 ], [ "▁chamber", -11.768266677856445 ], [ "nement", -11.768463134765625 ], [ "▁offense", -11.768610954284668 ], [ "▁gig", -11.768631935119629 ], [ "▁abandon", -11.768691062927246 ], [ "▁Kraft", -11.768783569335938 ], [ "▁Medicare", -11.768784523010254 ], [ "▁soap", -11.768835067749023 ], [ "▁Fur", -11.768990516662598 ], [ "▁conditioning", -11.769103050231934 ], [ "rained", -11.769132614135742 ], [ "▁puts", -11.769134521484375 ], [ "▁cod", -11.76930046081543 ], [ "lassen", -11.76941967010498 ], [ "FL", -11.769600868225098 ], [ "▁komplett", -11.769664764404297 ], [ "▁entscheiden", -11.769665718078613 ], [ "▁Hour", -11.769691467285156 ], [ "?!", -11.770040512084961 ], [ "Stream", -11.770145416259766 ], [ "▁Grad", -11.770209312438965 ], [ "▁gently", -11.770231246948242 ], [ "▁poetry", -11.770429611206055 ], [ "▁secured", -11.770438194274902 ], [ "oph", -11.770466804504395 ], [ "hop", -11.770561218261719 ], [ "handel", -11.770634651184082 ], [ "▁besoins", -11.770658493041992 ], [ "got", -11.770824432373047 ], [ "▁Chrome", -11.77088737487793 ], [ "ILL", -11.770930290222168 ], [ "▁Schritt", -11.771014213562012 ], [ "▁spell", -11.771063804626465 ], [ "▁grinding", -11.771334648132324 ], [ "▁ramp", -11.77144718170166 ], [ "▁mama", -11.7716064453125 ], [ "▁bottles", -11.77180290222168 ], [ "▁canvas", -11.771906852722168 ], [ "▁ecosystem", -11.77194595336914 ], [ "aţii", -11.771967887878418 ], [ "cellular", -11.772085189819336 ], [ "▁Spin", -11.772164344787598 ], [ 
"▁Discover", -11.772217750549316 ], [ "-17", -11.772322654724121 ], [ "▁feeding", -11.77246379852295 ], [ "▁stops", -11.7725191116333 ], [ "▁haute", -11.772552490234375 ], [ "▁Entscheidung", -11.7725830078125 ], [ "▁semble", -11.772590637207031 ], [ "▁acele", -11.772857666015625 ], [ "▁Walk", -11.773154258728027 ], [ "▁joke", -11.773180961608887 ], [ "▁Fed", -11.773294448852539 ], [ "climat", -11.773306846618652 ], [ "▁Lot", -11.773460388183594 ], [ "runner", -11.773551940917969 ], [ "▁flip", -11.773786544799805 ], [ "▁werde", -11.773818016052246 ], [ "▁Deck", -11.77417278289795 ], [ "bala", -11.774296760559082 ], [ "▁sacrifice", -11.774375915527344 ], [ "cid", -11.774388313293457 ], [ "him", -11.774569511413574 ], [ "zahlen", -11.774587631225586 ], [ "▁heater", -11.774596214294434 ], [ "formed", -11.774619102478027 ], [ "plus", -11.774711608886719 ], [ "▁util", -11.774742126464844 ], [ "rama", -11.775019645690918 ], [ "(4)", -11.7750244140625 ], [ "▁knife", -11.775111198425293 ], [ "▁traditions", -11.77520751953125 ], [ "▁dip", -11.775357246398926 ], [ "kill", -11.775405883789062 ], [ "▁Rich", -11.775418281555176 ], [ "▁DI", -11.775555610656738 ], [ "▁containers", -11.775677680969238 ], [ "▁locuri", -11.775728225708008 ], [ "▁continent", -11.775797843933105 ], [ "teilung", -11.776005744934082 ], [ "▁vreme", -11.776028633117676 ], [ "organisation", -11.776126861572266 ], [ "serie", -11.776135444641113 ], [ "▁Diamond", -11.776204109191895 ], [ "magazin", -11.77627944946289 ], [ "▁poster", -11.776455879211426 ], [ "▁passenger", -11.7765474319458 ], [ "▁soldiers", -11.776552200317383 ], [ "▁urgent", -11.776616096496582 ], [ "▁Lip", -11.77680778503418 ], [ "▁aşa", -11.776972770690918 ], [ "▁BO", -11.777024269104004 ], [ "▁somebody", -11.777076721191406 ], [ "▁silence", -11.777132034301758 ], [ "cop", -11.777359962463379 ], [ "▁Burn", -11.77749252319336 ], [ "▁stopping", -11.777544021606445 ], [ "▁essence", -11.777568817138672 ], [ "▁hitting", -11.777762413024902 ], [ "▁producers", -11.777801513671875 ], [ "▁fibre", -11.777894020080566 ], [ "▁seasonal", -11.777960777282715 ], [ "▁tara", -11.778096199035645 ], [ "▁Jose", -11.778099060058594 ], [ "▁Better", -11.77825927734375 ], [ "▁steep", -11.778295516967773 ], [ "Alors", -11.778353691101074 ], [ "▁collecting", -11.778507232666016 ], [ "vre", -11.778635025024414 ], [ "▁disabled", -11.77863883972168 ], [ "▁voters", -11.778679847717285 ], [ "consuming", -11.779092788696289 ], [ "deemed", -11.779115676879883 ], [ "éra", -11.779227256774902 ], [ "opération", -11.779273986816406 ], [ "▁roller", -11.779305458068848 ], [ "Rather", -11.779321670532227 ], [ "▁leider", -11.779370307922363 ], [ "▁IV", -11.779434204101562 ], [ "▁erreichen", -11.779473304748535 ], [ "▁charging", -11.779657363891602 ], [ "tions", -11.77973747253418 ], [ "tiques", -11.779861450195312 ], [ "▁formats", -11.779876708984375 ], [ "▁painful", -11.78000545501709 ], [ "▁eager", -11.780061721801758 ], [ "generation", -11.780137062072754 ], [ "anna", -11.780235290527344 ], [ "▁races", -11.780323028564453 ], [ "force", -11.780357360839844 ], [ "▁ferm", -11.780522346496582 ], [ "▁breathing", -11.780618667602539 ], [ "▁offen", -11.780648231506348 ], [ "▁minds", -11.780805587768555 ], [ "▁musste", -11.780832290649414 ], [ "▁Vision", -11.780888557434082 ], [ "▁Installation", -11.780988693237305 ], [ "▁hesitate", -11.781002044677734 ], [ "▁somit", -11.781023979187012 ], [ "hôtel", -11.781044006347656 ], [ "cab", -11.781235694885254 ], [ "-16", -11.781312942504883 ], [ "▁Visual", 
-11.781418800354004 ], [ "intérêt", -11.781524658203125 ], [ "▁apel", -11.781831741333008 ], [ "therapy", -11.782089233398438 ], [ "volt", -11.78225040435791 ], [ "▁Rou", -11.782439231872559 ], [ "▁efficace", -11.782464027404785 ], [ "▁architectural", -11.782605171203613 ], [ "▁privilege", -11.782670974731445 ], [ "▁treating", -11.782711029052734 ], [ "▁Tam", -11.782722473144531 ], [ "tsch", -11.782744407653809 ], [ "building", -11.782750129699707 ], [ "▁associations", -11.782929420471191 ], [ "▁Consumer", -11.783424377441406 ], [ "▁Lim", -11.783496856689453 ], [ "newest", -11.7835054397583 ], [ "▁față", -11.783675193786621 ], [ "▁ships", -11.783732414245605 ], [ "lev", -11.78373908996582 ], [ "raft", -11.783817291259766 ], [ "▁variations", -11.783845901489258 ], [ "▁noua", -11.78386402130127 ], [ "▁Cab", -11.784063339233398 ], [ "1.2", -11.78409481048584 ], [ "▁ocazi", -11.784347534179688 ], [ "▁recommendation", -11.784449577331543 ], [ "titled", -11.78445053100586 ], [ "▁invoice", -11.78459644317627 ], [ "▁noastra", -11.784647941589355 ], [ "kur", -11.784700393676758 ], [ "issent", -11.784758567810059 ], [ "base", -11.784778594970703 ], [ "hä", -11.7848482131958 ], [ "888", -11.784914016723633 ], [ "▁declar", -11.784941673278809 ], [ "▁Football", -11.7850341796875 ], [ "▁Indeed", -11.785293579101562 ], [ "▁weapon", -11.785333633422852 ], [ "▁destroyed", -11.785457611083984 ], [ "▁enormous", -11.785594940185547 ], [ "▁blanket", -11.7857084274292 ], [ "▁aktiv", -11.785759925842285 ], [ "raw", -11.785791397094727 ], [ "▁computing", -11.785823822021484 ], [ "6)", -11.785955429077148 ], [ "▁Dam", -11.786152839660645 ], [ "▁confort", -11.786174774169922 ], [ "▁Gla", -11.786198616027832 ], [ "hardly", -11.786242485046387 ], [ "▁annually", -11.786269187927246 ], [ "▁destinations", -11.786401748657227 ], [ "▁guilty", -11.786404609680176 ], [ "▁scholarship", -11.786439895629883 ], [ "▁harmful", -11.786453247070312 ], [ "▁2-3", -11.786616325378418 ], [ "▁Race", -11.786638259887695 ], [ "▁hypo", -11.78671646118164 ], [ "▁shorter", -11.786733627319336 ], [ "quest", -11.78675651550293 ], [ "uze", -11.786812782287598 ], [ "izi", -11.787005424499512 ], [ "OO", -11.787095069885254 ], [ "▁Schutz", -11.787097930908203 ], [ "▁Teilnehmer", -11.787185668945312 ], [ "▁profiles", -11.787199020385742 ], [ "▁sustainability", -11.78747272491455 ], [ "▁emb", -11.787489891052246 ], [ "▁Augen", -11.787516593933105 ], [ "▁outdoors", -11.787542343139648 ], [ "▁Individual", -11.787548065185547 ], [ "▁pou", -11.78757095336914 ], [ "▁Together", -11.787575721740723 ], [ "HT", -11.787674903869629 ], [ "suited", -11.787755012512207 ], [ "▁tro", -11.787782669067383 ], [ "▁Strom", -11.787805557250977 ], [ "▁achievement", -11.78799819946289 ], [ "▁Range", -11.78815746307373 ], [ "tory", -11.78817081451416 ], [ "▁distribute", -11.788250923156738 ], [ "▁letzte", -11.788276672363281 ], [ "incorporated", -11.788287162780762 ], [ "▁Kir", -11.788325309753418 ], [ "ruf", -11.78839111328125 ], [ "▁disappointed", -11.788543701171875 ], [ "▁referral", -11.788602828979492 ], [ "flam", -11.788687705993652 ], [ "▁excessive", -11.7886962890625 ], [ "▁rapidement", -11.788743019104004 ], [ "▁Rio", -11.78875732421875 ], [ "aţia", -11.788951873779297 ], [ "▁meuble", -11.78912353515625 ], [ "▁2008.", -11.789135932922363 ], [ "▁Gall", -11.78915023803711 ], [ "▁française", -11.789369583129883 ], [ "▁ladies", -11.789695739746094 ], [ "ailed", -11.789746284484863 ], [ "El", -11.789834976196289 ], [ "▁wines", -11.789868354797363 ], [ 
"▁beispielsweise", -11.789876937866211 ], [ "▁gamme", -11.790193557739258 ], [ "▁guided", -11.79028034210205 ], [ "▁plin", -11.790339469909668 ], [ "Î", -11.790390968322754 ], [ "▁True", -11.790498733520508 ], [ "▁Temple", -11.790507316589355 ], [ "▁Pic", -11.790520668029785 ], [ "permalink", -11.790547370910645 ], [ "▁vedea", -11.790656089782715 ], [ "▁rank", -11.790922164916992 ], [ "▁Grill", -11.791025161743164 ], [ "clin", -11.791070938110352 ], [ "▁Hab", -11.791089057922363 ], [ "▁odds", -11.791125297546387 ], [ "▁anytime", -11.791146278381348 ], [ "▁Thanksgiving", -11.791265487670898 ], [ "guard", -11.791300773620605 ], [ "▁essays", -11.791389465332031 ], [ "▁PE", -11.79139518737793 ], [ "▁Rechts", -11.791494369506836 ], [ "mals", -11.791751861572266 ], [ "achi", -11.791762351989746 ], [ "▁Anthony", -11.791765213012695 ], [ "▁réponse", -11.792036056518555 ], [ "standing", -11.79227352142334 ], [ "▁Mol", -11.792427062988281 ], [ "▁Canon", -11.792474746704102 ], [ "▁silk", -11.792515754699707 ], [ "▁pourrait", -11.79278564453125 ], [ "▁raport", -11.79280948638916 ], [ "▁Woche", -11.792889595031738 ], [ "fallen", -11.79293155670166 ], [ "sting", -11.79310131072998 ], [ "▁circulation", -11.793102264404297 ], [ "▁skirt", -11.7931547164917 ], [ "▁Title", -11.793187141418457 ], [ "▁17.", -11.79331111907959 ], [ "▁Touch", -11.793486595153809 ], [ "▁utilizat", -11.79352855682373 ], [ "▁Organisation", -11.793569564819336 ], [ "▁mereu", -11.793848991394043 ], [ "▁oxygen", -11.793953895568848 ], [ "lique", -11.793985366821289 ], [ "▁consume", -11.794100761413574 ], [ "▁Barb", -11.794102668762207 ], [ "1.1", -11.794105529785156 ], [ "▁nicely", -11.79419231414795 ], [ "▁psychological", -11.794227600097656 ], [ "▁refrigerator", -11.794478416442871 ], [ "▁fantasy", -11.79481029510498 ], [ "▁dispute", -11.79494571685791 ], [ "▁IBM", -11.794954299926758 ], [ "▁Nation", -11.794971466064453 ], [ "▁mobil", -11.795063972473145 ], [ "▁density", -11.795201301574707 ], [ "ske", -11.795230865478516 ], [ "▁intimate", -11.795313835144043 ], [ "▁tailored", -11.795319557189941 ], [ "▁outline", -11.795472145080566 ], [ "TN", -11.79554557800293 ], [ "mur", -11.795634269714355 ], [ "GC", -11.795662879943848 ], [ "they", -11.795992851257324 ], [ "pag", -11.796161651611328 ], [ "▁Kultur", -11.796246528625488 ], [ "grün", -11.796281814575195 ], [ "voted", -11.796529769897461 ], [ "▁donné", -11.796546936035156 ], [ "▁Să", -11.796629905700684 ], [ "enberg", -11.796648979187012 ], [ "▁wi", -11.79686450958252 ], [ "▁Francis", -11.797057151794434 ], [ "▁Rick", -11.797157287597656 ], [ "accord", -11.797403335571289 ], [ "▁Zusammen", -11.797415733337402 ], [ "▁nonprofit", -11.797456741333008 ], [ "▁listings", -11.797615051269531 ], [ "6,", -11.797908782958984 ], [ "▁maximize", -11.798253059387207 ], [ "bud", -11.798345565795898 ], [ "▁promotional", -11.798486709594727 ], [ "cina", -11.798646926879883 ], [ "▁potatoes", -11.79869556427002 ], [ "▁mot", -11.798871040344238 ], [ "carries", -11.799384117126465 ], [ "▁stabilit", -11.799458503723145 ], [ "▁Door", -11.799574851989746 ], [ "▁downloaded", -11.799574851989746 ], [ "▁experimental", -11.799724578857422 ], [ "HD", -11.7997407913208 ], [ "▁parfois", -11.79980182647705 ], [ "▁zeigen", -11.800092697143555 ], [ "▁proposé", -11.80030632019043 ], [ "▁Verein", -11.800636291503906 ], [ "▁amestec", -11.800676345825195 ], [ "▁entreprise", -11.800718307495117 ], [ "▁PSD", -11.800841331481934 ], [ "▁bake", -11.800897598266602 ], [ "▁Rh", -11.800904273986816 ], [ "▁Mehr", 
-11.800922393798828 ], [ "▁purple", -11.801074028015137 ], [ "▁recipient", -11.80109691619873 ], [ "rare", -11.801166534423828 ], [ "egi", -11.80117130279541 ], [ "ancien", -11.801176071166992 ], [ "▁risque", -11.80118465423584 ], [ "▁mystery", -11.80157470703125 ], [ "mac", -11.801697731018066 ], [ "ibility", -11.80182933807373 ], [ "▁Moore", -11.801881790161133 ], [ "▁flavors", -11.801911354064941 ], [ "▁trauma", -11.801966667175293 ], [ "▁automotive", -11.802112579345703 ], [ "▁Anyway", -11.802197456359863 ], [ "▁simulation", -11.802253723144531 ], [ "▁crafts", -11.802525520324707 ], [ "▁measurements", -11.80257511138916 ], [ "▁cour", -11.80257797241211 ], [ "▁tard", -11.802600860595703 ], [ "nnie", -11.802881240844727 ], [ "▁Production", -11.803388595581055 ], [ "▁Cleaning", -11.803567886352539 ], [ "5,", -11.803644180297852 ], [ "▁Islamic", -11.803766250610352 ], [ "▁Gate", -11.80378532409668 ], [ "bay", -11.803814888000488 ], [ "HR", -11.803990364074707 ], [ "▁Offer", -11.80399227142334 ], [ "▁acceptance", -11.804107666015625 ], [ "▁Erfahrung", -11.80412769317627 ], [ "▁environ", -11.804193496704102 ], [ "▁fancy", -11.804218292236328 ], [ "▁bullet", -11.80437183380127 ], [ "organ", -11.804466247558594 ], [ "▁Peace", -11.804520606994629 ], [ "▁detalii", -11.80461597442627 ], [ "▁promised", -11.804715156555176 ], [ "▁wellness", -11.804746627807617 ], [ "▁satisfy", -11.80481243133545 ], [ "▁grants", -11.805212020874023 ], [ "accueil", -11.80522346496582 ], [ "▁oben", -11.805412292480469 ], [ "▁prospects", -11.80543327331543 ], [ "▁Events", -11.805513381958008 ], [ "2013", -11.805569648742676 ], [ "gesehen", -11.805685997009277 ], [ "▁£1", -11.805727005004883 ], [ "▁handelt", -11.805798530578613 ], [ "▁Spieler", -11.805876731872559 ], [ "▁Virtual", -11.806145668029785 ], [ "▁bubble", -11.806239128112793 ], [ "▁Trend", -11.806254386901855 ], [ "▁sistemul", -11.806315422058105 ], [ "▁Morgan", -11.806320190429688 ], [ "▁pole", -11.806503295898438 ], [ "▁spielen", -11.806533813476562 ], [ "tür", -11.806571006774902 ], [ "SCO", -11.806572914123535 ], [ "▁informative", -11.806678771972656 ], [ "▁affirm", -11.806755065917969 ], [ "▁Aqua", -11.806818008422852 ], [ "▁AR", -11.806888580322266 ], [ "richten", -11.807071685791016 ], [ "▁rewards", -11.807122230529785 ], [ "lub", -11.807235717773438 ], [ "shot", -11.807236671447754 ], [ "LM", -11.807540893554688 ], [ "Up", -11.807586669921875 ], [ "▁absolut", -11.807737350463867 ], [ "▁Mart", -11.807806968688965 ], [ "erweise", -11.807812690734863 ], [ "BP", -11.807977676391602 ], [ "▁difficile", -11.808152198791504 ], [ "▁Document", -11.808159828186035 ], [ "▁Sweet", -11.8082914352417 ], [ "▁indicator", -11.808338165283203 ], [ "▁Boden", -11.808389663696289 ], [ "mates", -11.808477401733398 ], [ "▁supporters", -11.808504104614258 ], [ "▁begun", -11.808600425720215 ], [ "▁blogging", -11.808611869812012 ], [ "▁CL", -11.808663368225098 ], [ "gres", -11.808692932128906 ], [ "▁preferences", -11.808738708496094 ], [ "▁screw", -11.808756828308105 ], [ "▁tutor", -11.808858871459961 ], [ "▁Additional", -11.80891227722168 ], [ "▁Bitte", -11.808976173400879 ], [ "utilizing", -11.808998107910156 ], [ "▁expérience", -11.809073448181152 ], [ "▁dur", -11.809146881103516 ], [ "▁precisely", -11.809178352355957 ], [ "▁janvier", -11.809394836425781 ], [ "AGE", -11.80987548828125 ], [ "moto", -11.810007095336914 ], [ "▁counsel", -11.810195922851562 ], [ "▁110", -11.810226440429688 ], [ "nick", -11.810245513916016 ], [ "licit", -11.810540199279785 ], [ "technik", 
-11.810659408569336 ], [ "▁collaborate", -11.810736656188965 ], [ "▁neighbors", -11.810794830322266 ], [ "tered", -11.810922622680664 ], [ "▁excel", -11.811025619506836 ], [ "▁Route", -11.811059951782227 ], [ "steuer", -11.81109619140625 ], [ "▁pioneer", -11.811607360839844 ], [ "nuit", -11.81169319152832 ], [ "▁skip", -11.811963081359863 ], [ "▁destruction", -11.811997413635254 ], [ "▁thesis", -11.812249183654785 ], [ "▁libre", -11.812317848205566 ], [ "▁petition", -11.81234073638916 ], [ "▁steady", -11.812456130981445 ], [ "▁medications", -11.812458992004395 ], [ "▁audiences", -11.812623023986816 ], [ "▁coaches", -11.812689781188965 ], [ "aller", -11.812704086303711 ], [ "3,000", -11.812705993652344 ], [ "▁anger", -11.812785148620605 ], [ "▁striking", -11.812844276428223 ], [ "▁shades", -11.81291675567627 ], [ "▁Sitz", -11.812994956970215 ], [ "▁gluten", -11.813162803649902 ], [ "▁egal", -11.813222885131836 ], [ "ania", -11.813223838806152 ], [ "▁defend", -11.813241004943848 ], [ "gut", -11.81382942199707 ], [ "▁reserves", -11.813895225524902 ], [ "▁advocate", -11.814053535461426 ], [ "▁Cit", -11.814082145690918 ], [ "▁technicians", -11.814105033874512 ], [ "▁cater", -11.814138412475586 ], [ "leitung", -11.814190864562988 ], [ "▁towns", -11.814335823059082 ], [ "▁Costa", -11.814364433288574 ], [ "▁confront", -11.814567565917969 ], [ "mount", -11.814652442932129 ], [ "▁nationale", -11.814706802368164 ], [ "▁adverse", -11.814932823181152 ], [ "▁couleur", -11.815112113952637 ], [ "▁delight", -11.815169334411621 ], [ "▁promises", -11.815224647521973 ], [ "▁silent", -11.81550121307373 ], [ "richtet", -11.815556526184082 ], [ "▁Companies", -11.815614700317383 ], [ "▁Charlotte", -11.815620422363281 ], [ "▁labels", -11.815652847290039 ], [ "▁Süd", -11.815656661987305 ], [ "▁Honor", -11.81567096710205 ], [ "▁complaints", -11.815710067749023 ], [ "▁siècle", -11.815752029418945 ], [ "▁suits", -11.815792083740234 ], [ "▁Bath", -11.815827369689941 ], [ "mise", -11.815926551818848 ], [ "▁acela", -11.8159818649292 ], [ "▁candidat", -11.816011428833008 ], [ "Flo", -11.816207885742188 ], [ "▁conservative", -11.816215515136719 ], [ "DD", -11.816314697265625 ], [ "▁changement", -11.816414833068848 ], [ "▁login", -11.816492080688477 ], [ "▁Fashion", -11.816585540771484 ], [ "reichen", -11.816672325134277 ], [ "through", -11.816751480102539 ], [ "aki", -11.817240715026855 ], [ "gna", -11.817547798156738 ], [ "▁verse", -11.817551612854004 ], [ "▁threats", -11.817622184753418 ], [ "▁Song", -11.817770004272461 ], [ "▁funded", -11.81792163848877 ], [ "langen", -11.818023681640625 ], [ "▁distribu", -11.818195343017578 ], [ "édition", -11.818316459655762 ], [ "▁royal", -11.818562507629395 ], [ "▁bevor", -11.818829536437988 ], [ "▁02", -11.818854331970215 ], [ "straße", -11.818938255310059 ], [ "edit", -11.81904125213623 ], [ "▁energetic", -11.81922721862793 ], [ "▁Carr", -11.819757461547852 ], [ "viol", -11.819937705993652 ], [ "▁niche", -11.820054054260254 ], [ "avais", -11.820099830627441 ], [ "▁backyard", -11.82010269165039 ], [ "▁Saudi", -11.820158958435059 ], [ "▁Zwei", -11.820207595825195 ], [ "▁Legal", -11.82027530670166 ], [ "accessed", -11.820277214050293 ], [ "▁choisi", -11.820340156555176 ], [ "▁GDP", -11.820343971252441 ], [ "oferă", -11.820352554321289 ], [ "hlen", -11.820490837097168 ], [ "▁Wor", -11.820520401000977 ], [ "▁cheer", -11.820586204528809 ], [ "▁barely", -11.820625305175781 ], [ "cost", -11.820646286010742 ], [ "▁Really", -11.820661544799805 ], [ "kol", -11.820721626281738 ], [ 
"▁binding", -11.821045875549316 ], [ "euer", -11.821136474609375 ], [ "▁optimization", -11.821158409118652 ], [ "▁Designer", -11.8211669921875 ], [ "▁measuring", -11.82117748260498 ], [ "ncy", -11.821516036987305 ], [ "weise", -11.821520805358887 ], [ "DER", -11.821850776672363 ], [ "▁$7", -11.821949005126953 ], [ "▁Anfang", -11.821954727172852 ], [ "material", -11.821967124938965 ], [ "▁antique", -11.822281837463379 ], [ "▁Certificate", -11.822294235229492 ], [ "▁modest", -11.822370529174805 ], [ "ției", -11.822427749633789 ], [ "▁praise", -11.82245922088623 ], [ "▁Springs", -11.822660446166992 ], [ "▁organiza", -11.823041915893555 ], [ "jurul", -11.823047637939453 ], [ "▁plumbing", -11.82341194152832 ], [ "▁foster", -11.823490142822266 ], [ "▁Wy", -11.823491096496582 ], [ "▁Sab", -11.823503494262695 ], [ "▁overwhelming", -11.823677062988281 ], [ "▁matin", -11.823812484741211 ], [ "▁responded", -11.82408332824707 ], [ "▁confused", -11.824150085449219 ], [ "▁blessed", -11.824280738830566 ], [ "▁160", -11.824295997619629 ], [ "▁ingredient", -11.824360847473145 ], [ "▁confer", -11.82448673248291 ], [ "▁Gesundheit", -11.824530601501465 ], [ "▁bucket", -11.824555397033691 ], [ "kraft", -11.824565887451172 ], [ "lange", -11.824630737304688 ], [ "▁Kopf", -11.824678421020508 ], [ "▁Prize", -11.824678421020508 ], [ "▁authorized", -11.824779510498047 ], [ "▁tick", -11.824803352355957 ], [ "▁steal", -11.824910163879395 ], [ "Depending", -11.824918746948242 ], [ "Depuis", -11.824952125549316 ], [ "▁functie", -11.82499885559082 ], [ "▁developments", -11.825053215026855 ], [ "▁Christians", -11.825311660766602 ], [ "▁calculated", -11.8256254196167 ], [ "▁Leave", -11.825672149658203 ], [ "▁Jam", -11.82573413848877 ], [ "▁habitat", -11.825760841369629 ], [ "▁Sorry", -11.825801849365234 ], [ "▁oficial", -11.825944900512695 ], [ "▁allein", -11.826079368591309 ], [ "▁concentrate", -11.82608413696289 ], [ "dica", -11.826302528381348 ], [ "▁Convention", -11.826476097106934 ], [ "illes", -11.826550483703613 ], [ "▁fum", -11.82664680480957 ], [ "▁Tal", -11.826651573181152 ], [ "Europe", -11.826899528503418 ], [ "▁attachment", -11.826949119567871 ], [ "▁sensibil", -11.826995849609375 ], [ "▁clue", -11.82715892791748 ], [ "▁specialty", -11.827203750610352 ], [ "▁Cou", -11.827229499816895 ], [ "▁liste", -11.827278137207031 ], [ "▁Penn", -11.827465057373047 ], [ "TRA", -11.827559471130371 ], [ "▁Themen", -11.827561378479004 ], [ "▁motivated", -11.827906608581543 ], [ "▁camere", -11.828017234802246 ], [ "▁14,", -11.828393936157227 ], [ "▁attendance", -11.828557968139648 ], [ "atorii", -11.828581809997559 ], [ "chemistry", -11.82873821258545 ], [ "▁roofing", -11.828959465026855 ], [ "▁Links", -11.829048156738281 ], [ "▁trou", -11.829103469848633 ], [ "▁trucks", -11.829136848449707 ], [ "hilfe", -11.829557418823242 ], [ "▁(6", -11.829599380493164 ], [ "vapor", -11.82964038848877 ], [ "mad", -11.829668045043945 ], [ "▁Albert", -11.829877853393555 ], [ "▁FIG", -11.830073356628418 ], [ "▁Rand", -11.830187797546387 ], [ "▁Constitution", -11.830219268798828 ], [ "ambi", -11.830294609069824 ], [ "▁Syria", -11.830307006835938 ], [ "▁Fond", -11.830477714538574 ], [ "▁gouvernement", -11.830594062805176 ], [ "▁Active", -11.830705642700195 ], [ "▁prints", -11.830801963806152 ], [ "▁weigh", -11.8308687210083 ], [ "▁Craft", -11.831069946289062 ], [ "▁projets", -11.831247329711914 ], [ "▁paste", -11.831377029418945 ], [ "anci", -11.83139705657959 ], [ "kie", -11.831411361694336 ], [ "▁gains", -11.83165168762207 ], [ "▁Record", 
-11.831942558288574 ], [ "▁beliefs", -11.831954956054688 ], [ "countless", -11.831957817077637 ], [ "▁tomatoes", -11.831997871398926 ], [ "arie", -11.832082748413086 ], [ "▁140", -11.83211612701416 ], [ "▁ethical", -11.832229614257812 ], [ "objectif", -11.832279205322266 ], [ "▁acestuia", -11.832283973693848 ], [ "▁Bluetooth", -11.832398414611816 ], [ "▁agriculture", -11.832746505737305 ], [ "uré", -11.833027839660645 ], [ "▁cale", -11.833072662353516 ], [ "▁articol", -11.833073616027832 ], [ "▁gum", -11.833319664001465 ], [ "▁vendor", -11.833490371704102 ], [ "ifié", -11.833527565002441 ], [ "▁peer", -11.833662033081055 ], [ "pod", -11.834036827087402 ], [ "▁utilized", -11.834113121032715 ], [ "▁Mü", -11.834207534790039 ], [ "owohl", -11.834208488464355 ], [ "hilst", -11.834233283996582 ], [ "frame", -11.834260940551758 ], [ "▁fridge", -11.834822654724121 ], [ "▁query", -11.835108757019043 ], [ "▁Survey", -11.835227012634277 ], [ "▁Hell", -11.835247993469238 ], [ "▁notification", -11.83530044555664 ], [ "TR", -11.83538818359375 ], [ "▁ultima", -11.835505485534668 ], [ "▁radiation", -11.835631370544434 ], [ "▁musicians", -11.835821151733398 ], [ "CAN", -11.83595085144043 ], [ "▁grocery", -11.83607292175293 ], [ "▁Sicherheit", -11.83611011505127 ], [ "▁Highway", -11.836276054382324 ], [ "▁Break", -11.836285591125488 ], [ "TED", -11.836345672607422 ], [ "ön", -11.836352348327637 ], [ "▁biological", -11.836352348327637 ], [ "qual", -11.836397171020508 ], [ "250", -11.83641242980957 ], [ "▁modify", -11.836651802062988 ], [ "▁Hit", -11.836698532104492 ], [ "▁Iar", -11.836838722229004 ], [ "aged", -11.836884498596191 ], [ "...)", -11.83688735961914 ], [ "▁contrat", -11.836928367614746 ], [ "▁centres", -11.836956977844238 ], [ "griff", -11.836987495422363 ], [ "Our", -11.837233543395996 ], [ "▁determination", -11.837300300598145 ], [ "▁variables", -11.83742904663086 ], [ "▁nuts", -11.837472915649414 ], [ "échange", -11.837577819824219 ], [ "extérieur", -11.837631225585938 ], [ "▁suflet", -11.83764362335205 ], [ "▁Scha", -11.837752342224121 ], [ "stück", -11.837774276733398 ], [ "▁Tau", -11.837821960449219 ], [ "▁participa", -11.838008880615234 ], [ "▁mad", -11.838034629821777 ], [ "▁relie", -11.838051795959473 ], [ "▁Fine", -11.83808422088623 ], [ "▁grape", -11.838118553161621 ], [ "▁wage", -11.838141441345215 ], [ "▁startup", -11.838193893432617 ], [ "▁blank", -11.838194847106934 ], [ "▁physique", -11.838199615478516 ], [ "▁punch", -11.838233947753906 ], [ "▁contacts", -11.838321685791016 ], [ "▁dezvolt", -11.83835220336914 ], [ "cross", -11.838639259338379 ], [ "▁TR", -11.838652610778809 ], [ "▁gener", -11.838754653930664 ], [ "▁indem", -11.838823318481445 ], [ "▁Stan", -11.838839530944824 ], [ "▁azi", -11.838930130004883 ], [ "▁Sel", -11.838958740234375 ], [ "▁Tot", -11.83924674987793 ], [ "vra", -11.839341163635254 ], [ "▁recruit", -11.839482307434082 ], [ "▁Yeah", -11.839494705200195 ], [ "/10", -11.839507102966309 ], [ "▁nail", -11.83956241607666 ], [ "▁Ky", -11.839611053466797 ], [ "▁beloved", -11.839760780334473 ], [ "operative", -11.839823722839355 ], [ "▁Tickets", -11.83983325958252 ], [ "▁tear", -11.840229988098145 ], [ "▁amp", -11.840352058410645 ], [ "▁04", -11.840361595153809 ], [ "▁illustrate", -11.840361595153809 ], [ "▁mac", -11.840400695800781 ], [ "▁receiver", -11.840482711791992 ], [ "atrice", -11.840508460998535 ], [ "▁souhait", -11.840572357177734 ], [ "▁Gewinn", -11.840619087219238 ], [ "▁Vit", -11.840808868408203 ], [ "roch", -11.841202735900879 ], [ "▁arata", 
-11.841262817382812 ], [ "▁Indiana", -11.841364860534668 ], [ "child", -11.841516494750977 ], [ "▁invested", -11.84157657623291 ], [ "▁Excellent", -11.841625213623047 ], [ "gori", -11.841769218444824 ], [ "▁thermal", -11.841813087463379 ], [ "Str", -11.841973304748535 ], [ "▁liver", -11.84201717376709 ], [ "miss", -11.842035293579102 ], [ "▁utiliser", -11.842120170593262 ], [ "▁prest", -11.842445373535156 ], [ "2016", -11.842506408691406 ], [ "isée", -11.842508316040039 ], [ "▁Index", -11.842559814453125 ], [ "▁arch", -11.842639923095703 ], [ "▁Toyota", -11.842748641967773 ], [ "▁YOUR", -11.842782020568848 ], [ "▁Mexican", -11.842891693115234 ], [ "▁gegenüber", -11.842940330505371 ], [ "▁cannabis", -11.843033790588379 ], [ "bis", -11.843077659606934 ], [ "vage", -11.843083381652832 ], [ "hall", -11.843091011047363 ], [ "fax", -11.843137741088867 ], [ "▁spoken", -11.843232154846191 ], [ "▁Zimmer", -11.843544960021973 ], [ "kauf", -11.8436279296875 ], [ "▁couleurs", -11.843705177307129 ], [ "▁NJ", -11.844026565551758 ], [ "▁Heritage", -11.844318389892578 ], [ "▁Pflege", -11.844321250915527 ], [ "luc", -11.844361305236816 ], [ "▁56", -11.844489097595215 ], [ "VP", -11.844542503356934 ], [ "▁cuvinte", -11.844594955444336 ], [ "▁Alliance", -11.844614028930664 ], [ "▁coco", -11.844615936279297 ], [ "▁leverage", -11.844762802124023 ], [ "auch", -11.844844818115234 ], [ "▁Cart", -11.84506607055664 ], [ "taux", -11.84532642364502 ], [ "east", -11.84560775756836 ], [ "▁decorating", -11.84565258026123 ], [ "tip", -11.84565544128418 ], [ "▁Communications", -11.845780372619629 ], [ "ACE", -11.84580135345459 ], [ "▁Consul", -11.845993041992188 ], [ "▁Swiss", -11.846197128295898 ], [ "inci", -11.846230506896973 ], [ "▁Fact", -11.846312522888184 ], [ "▁ajung", -11.846321105957031 ], [ "▁airline", -11.846325874328613 ], [ "▁kidney", -11.846379280090332 ], [ "▁Records", -11.84642505645752 ], [ "▁Olympic", -11.846747398376465 ], [ "▁dried", -11.84719467163086 ], [ "oivent", -11.847333908081055 ], [ "▁Adobe", -11.847467422485352 ], [ "▁powers", -11.847748756408691 ], [ "lande", -11.847834587097168 ], [ "▁relieve", -11.847858428955078 ], [ "ţine", -11.847898483276367 ], [ "▁gradually", -11.847945213317871 ], [ "mud", -11.84811019897461 ], [ "▁30,", -11.848116874694824 ], [ "▁plante", -11.848133087158203 ], [ "▁Hug", -11.848225593566895 ], [ "▁Focus", -11.84853458404541 ], [ "▁distinctive", -11.848594665527344 ], [ "▁Bab", -11.848662376403809 ], [ "tata", -11.848679542541504 ], [ "▁Nun", -11.848797798156738 ], [ "▁Eve", -11.848811149597168 ], [ "▁déc", -11.848881721496582 ], [ "▁Beitrag", -11.84900951385498 ], [ "▁devenit", -11.849042892456055 ], [ "driven", -11.849250793457031 ], [ "▁offerings", -11.84933853149414 ], [ "▁exc", -11.84941577911377 ], [ "encies", -11.849576950073242 ], [ "▁Neuro", -11.849588394165039 ], [ "scher", -11.849604606628418 ], [ "map", -11.849703788757324 ], [ "pending", -11.849783897399902 ], [ "▁courage", -11.849799156188965 ], [ "axe", -11.849894523620605 ], [ "▁Gesellschaft", -11.849900245666504 ], [ "▁ears", -11.85000991821289 ], [ "▁aider", -11.850403785705566 ], [ "▁Cast", -11.85042667388916 ], [ "fast", -11.850442886352539 ], [ "▁departe", -11.850502014160156 ], [ "▁oak", -11.850507736206055 ], [ "▁batch", -11.850730895996094 ], [ "▁Corporate", -11.850762367248535 ], [ "▁Ost", -11.850895881652832 ], [ "-14", -11.850897789001465 ], [ "▁Pie", -11.85115909576416 ], [ "▁ranking", -11.851273536682129 ], [ "clusion", -11.851316452026367 ], [ "▁costume", -11.851347923278809 ], [ 
"▁Knight", -11.851449966430664 ], [ "▁privat", -11.851577758789062 ], [ "▁Engineer", -11.851593971252441 ], [ "▁gens", -11.8517427444458 ], [ "physics", -11.85176944732666 ], [ "generating", -11.851773262023926 ], [ "directement", -11.851786613464355 ], [ "▁confidential", -11.851810455322266 ], [ "▁poet", -11.851937294006348 ], [ "▁monster", -11.851944923400879 ], [ "▁suppose", -11.851984977722168 ], [ "său", -11.851996421813965 ], [ "▁balls", -11.852103233337402 ], [ "▁substitute", -11.852137565612793 ], [ "▁simultaneously", -11.852238655090332 ], [ "▁specify", -11.852272033691406 ], [ "wald", -11.852287292480469 ], [ "▁collapse", -11.852352142333984 ], [ "dessus", -11.852458953857422 ], [ "▁vitr", -11.852516174316406 ], [ "▁recruitment", -11.852607727050781 ], [ "denken", -11.852632522583008 ], [ "▁candy", -11.852691650390625 ], [ "▁tourists", -11.852721214294434 ], [ "dimensional", -11.852782249450684 ], [ "conce", -11.852814674377441 ], [ "wechsel", -11.852822303771973 ], [ "▁passende", -11.852971076965332 ], [ "industrie", -11.85299301147461 ], [ "agne", -11.853127479553223 ], [ "▁warehouse", -11.853233337402344 ], [ "▁Jugend", -11.853277206420898 ], [ "▁Weise", -11.853357315063477 ], [ "▁Zone", -11.853528022766113 ], [ "▁licence", -11.853550910949707 ], [ "▁broker", -11.853630065917969 ], [ "▁Rolle", -11.85365104675293 ], [ "pton", -11.853789329528809 ], [ "▁preference", -11.853846549987793 ], [ "▁homeowners", -11.853861808776855 ], [ "▁Lum", -11.85387134552002 ], [ "▁Chairman", -11.853879928588867 ], [ "▁Pages", -11.853998184204102 ], [ "▁beam", -11.854005813598633 ], [ "▁coordinate", -11.854158401489258 ], [ "▁Tool", -11.854212760925293 ], [ "▁complexity", -11.854272842407227 ], [ "▁checks", -11.854339599609375 ], [ "▁Bedroom", -11.854405403137207 ], [ "minded", -11.854538917541504 ], [ "▁copiii", -11.854694366455078 ], [ "▁celebrating", -11.85470199584961 ], [ "zimmer", -11.854759216308594 ], [ "▁Imagine", -11.854759216308594 ], [ "▁decoration", -11.854830741882324 ], [ "team", -11.855354309082031 ], [ "▁împreună", -11.855369567871094 ], [ "▁publicly", -11.855391502380371 ], [ "▁centuries", -11.855514526367188 ], [ "▁Islands", -11.855644226074219 ], [ "▁ethnic", -11.855663299560547 ], [ "still", -11.85576057434082 ], [ "stieg", -11.855823516845703 ], [ "emia", -11.855904579162598 ], [ "tags", -11.856026649475098 ], [ "▁marche", -11.856062889099121 ], [ "▁migration", -11.856096267700195 ], [ "▁banner", -11.85616683959961 ], [ "▁macro", -11.856378555297852 ], [ "▁Edit", -11.856379508972168 ], [ "tran", -11.85656452178955 ], [ "ça", -11.856597900390625 ], [ "▁recycling", -11.856670379638672 ], [ "▁1,000", -11.856673240661621 ], [ "▁Quelle", -11.856891632080078 ], [ "▁Vel", -11.85700511932373 ], [ "▁Rit", -11.857025146484375 ], [ "▁Spaß", -11.857046127319336 ], [ "▁Corn", -11.857074737548828 ], [ "tracted", -11.857177734375 ], [ "cited", -11.857185363769531 ], [ "▁tablets", -11.857202529907227 ], [ "▁Display", -11.857337951660156 ], [ "▁persoana", -11.857392311096191 ], [ "Term", -11.857410430908203 ], [ "▁Vancouver", -11.857537269592285 ], [ "▁Gäste", -11.857550621032715 ], [ "determining", -11.857608795166016 ], [ "▁populations", -11.85778522491455 ], [ "aison", -11.857873916625977 ], [ "▁surgical", -11.858072280883789 ], [ "tale", -11.858160018920898 ], [ "ivi", -11.858283042907715 ], [ "▁Zur", -11.858388900756836 ], [ "esprit", -11.858574867248535 ], [ "▁Edge", -11.858665466308594 ], [ "dach", -11.858760833740234 ], [ "phi", -11.858773231506348 ], [ "▁suc", -11.858841896057129 ], 
[ "▁scrie", -11.858848571777344 ], [ "▁Ausbildung", -11.858885765075684 ], [ "▁51", -11.85892391204834 ], [ "ologi", -11.858938217163086 ], [ "▁correction", -11.859049797058105 ], [ "▁Wald", -11.859078407287598 ], [ "▁additionally", -11.859131813049316 ], [ "▁proche", -11.859353065490723 ], [ "▁classical", -11.859477996826172 ], [ "▁bringen", -11.859490394592285 ], [ "▁(10", -11.859611511230469 ], [ "▁Mile", -11.859809875488281 ], [ "lace", -11.859885215759277 ], [ "▁premi", -11.85988712310791 ], [ "▁constitute", -11.860029220581055 ], [ "▁bitter", -11.860078811645508 ], [ "▁Inform", -11.860295295715332 ], [ "▁corporations", -11.860334396362305 ], [ "▁Lisa", -11.860494613647461 ], [ "▁obligat", -11.860685348510742 ], [ "Throughout", -11.860738754272461 ], [ "▁Rs", -11.860769271850586 ], [ "▁Hair", -11.860916137695312 ], [ "▁supplements", -11.86099624633789 ], [ "▁motorcycle", -11.861054420471191 ], [ "escent", -11.861132621765137 ], [ "▁investi", -11.861222267150879 ], [ "▁continuously", -11.861265182495117 ], [ "▁Essen", -11.861334800720215 ], [ "▁precision", -11.8613862991333 ], [ "▁deficit", -11.861461639404297 ], [ "▁wallet", -11.861481666564941 ], [ "▁Bürger", -11.861531257629395 ], [ "chir", -11.861574172973633 ], [ "9)", -11.86161994934082 ], [ "▁Programme", -11.861716270446777 ], [ "▁simplement", -11.86193561553955 ], [ "MD", -11.862093925476074 ], [ "▁rouge", -11.862096786499023 ], [ "usion", -11.862133979797363 ], [ "▁stove", -11.862208366394043 ], [ "▁prospective", -11.862224578857422 ], [ "▁corp", -11.86234188079834 ], [ "▁impacts", -11.862401008605957 ], [ "▁bride", -11.86266803741455 ], [ "0.0", -11.862788200378418 ], [ "hid", -11.862833976745605 ], [ "▁warrant", -11.862930297851562 ], [ "▁Ice", -11.8631010055542 ], [ "▁sensible", -11.863151550292969 ], [ "▁vreo", -11.863166809082031 ], [ "spekt", -11.863249778747559 ], [ "▁appreciation", -11.8633394241333 ], [ "▁automation", -11.863377571105957 ], [ "Luc", -11.86341381072998 ], [ "teaches", -11.863471031188965 ], [ "▁fold", -11.863506317138672 ], [ "deutsche", -11.863523483276367 ], [ "▁assisted", -11.86380386352539 ], [ "▁straightforward", -11.863932609558105 ], [ "▁mechanic", -11.864068031311035 ], [ "observ", -11.864169120788574 ], [ "▁Schau", -11.864195823669434 ], [ "▁Recently", -11.864301681518555 ], [ "kers", -11.86435604095459 ], [ "▁Soft", -11.864455223083496 ], [ "muni", -11.864537239074707 ], [ "▁lie", -11.864617347717285 ], [ "▁Fat", -11.864728927612305 ], [ "cream", -11.86476993560791 ], [ "▁snack", -11.864909172058105 ], [ "▁juin", -11.865068435668945 ], [ "▁competent", -11.865134239196777 ], [ "▁Drug", -11.865141868591309 ], [ "▁Row", -11.865302085876465 ], [ "▁needle", -11.865852355957031 ], [ "▁convey", -11.865900039672852 ], [ "▁voie", -11.86600399017334 ], [ "▁Hon", -11.866190910339355 ], [ "▁ebook", -11.866194725036621 ], [ "▁veteran", -11.866209030151367 ], [ "▁statistical", -11.866217613220215 ], [ "190", -11.866312980651855 ], [ "▁munca", -11.866402626037598 ], [ "▁venues", -11.866438865661621 ], [ "▁Viel", -11.866604804992676 ], [ "▁décor", -11.866799354553223 ], [ "▁répond", -11.8670015335083 ], [ "▁produsele", -11.86700439453125 ], [ "ruc", -11.867009162902832 ], [ "▁drops", -11.867011070251465 ], [ "▁autant", -11.867311477661133 ], [ "▁Fahrzeug", -11.867313385009766 ], [ "▁hills", -11.86735725402832 ], [ "ference", -11.867414474487305 ], [ "▁Glück", -11.86742115020752 ], [ "▁Pac", -11.867480278015137 ], [ "▁permettr", -11.867568969726562 ], [ "▁mouvement", -11.867713928222656 ], [ "établissement", 
-11.867859840393066 ], [ "▁Parc", -11.867874145507812 ], [ "▁solving", -11.867900848388672 ], [ "▁jail", -11.867972373962402 ], [ "▁junk", -11.867980003356934 ], [ "▁jeux", -11.868091583251953 ], [ "▁rôle", -11.868107795715332 ], [ "▁cache", -11.868124961853027 ], [ "▁Answer", -11.86832046508789 ], [ "wir", -11.868706703186035 ], [ "option", -11.868732452392578 ], [ "▁Tiger", -11.868739128112793 ], [ "▁Ble", -11.868793487548828 ], [ "Mitglied", -11.868797302246094 ], [ "▁partial", -11.868819236755371 ], [ "▁Mercedes", -11.86888313293457 ], [ "tire", -11.869001388549805 ], [ "MENT", -11.869091987609863 ], [ "▁transit", -11.869230270385742 ], [ "▁cineva", -11.869285583496094 ], [ "▁Andrea", -11.869294166564941 ], [ "▁boundaries", -11.869497299194336 ], [ "script", -11.870061874389648 ], [ "▁Medi", -11.870123863220215 ], [ "schreiben", -11.870203018188477 ], [ "▁lobby", -11.87035846710205 ], [ "▁defendant", -11.870406150817871 ], [ "▁sq", -11.870467185974121 ], [ "▁forgotten", -11.870569229125977 ], [ "stimmung", -11.870651245117188 ], [ "hus", -11.870665550231934 ], [ "RY", -11.870728492736816 ], [ "▁Anderson", -11.870748519897461 ], [ "▁Dental", -11.870828628540039 ], [ "ject", -11.87110710144043 ], [ "▁Nutzer", -11.871377944946289 ], [ "▁Portland", -11.871540069580078 ], [ "scription", -11.871636390686035 ], [ "▁angel", -11.871695518493652 ], [ "▁monument", -11.871748924255371 ], [ "▁număr", -11.871784210205078 ], [ "▁Lane", -11.871800422668457 ], [ "▁Bai", -11.871894836425781 ], [ "But", -11.871909141540527 ], [ "▁calculate", -11.872315406799316 ], [ "▁provoca", -11.87247371673584 ], [ "▁votes", -11.872493743896484 ], [ "RNA", -11.872503280639648 ], [ "though", -11.87259292602539 ], [ "spor", -11.872631072998047 ], [ "▁connaissance", -11.872695922851562 ], [ "▁Anwendung", -11.872932434082031 ], [ "▁Kate", -11.873123168945312 ], [ "lob", -11.87315845489502 ], [ "▁Conf", -11.873180389404297 ], [ "bung", -11.873212814331055 ], [ "ander", -11.873282432556152 ], [ "▁functioning", -11.873297691345215 ], [ "▁sponsored", -11.873324394226074 ], [ "rav", -11.873734474182129 ], [ "▁resistant", -11.873797416687012 ], [ "tră", -11.873916625976562 ], [ "▁costly", -11.873923301696777 ], [ "▁Mars", -11.873991012573242 ], [ "▁tir", -11.874075889587402 ], [ "▁writes", -11.874134063720703 ], [ "▁Greg", -11.874267578125 ], [ "▁Question", -11.874714851379395 ], [ "▁corporation", -11.87485408782959 ], [ "▁lire", -11.874991416931152 ], [ "locked", -11.875048637390137 ], [ "8,", -11.875092506408691 ], [ "▁sagt", -11.875301361083984 ], [ "gaining", -11.87536907196045 ], [ "▁Pierre", -11.875688552856445 ], [ "verb", -11.875725746154785 ], [ "▁Barcelona", -11.87578296661377 ], [ "werte", -11.876474380493164 ], [ "▁disponible", -11.87651538848877 ], [ "▁urge", -11.876521110534668 ], [ "▁expecting", -11.876572608947754 ], [ "▁Girl", -11.87662124633789 ], [ "▁unlimited", -11.876761436462402 ], [ "watt", -11.876788139343262 ], [ "▁Möglichkeiten", -11.876813888549805 ], [ "▁schöne", -11.876847267150879 ], [ "rium", -11.877076148986816 ], [ "That", -11.877272605895996 ], [ "▁socio", -11.877296447753906 ], [ "▁Democrats", -11.877351760864258 ], [ "guten", -11.877422332763672 ], [ "▁Lou", -11.877425193786621 ], [ "ităţi", -11.877559661865234 ], [ "▁possibilité", -11.877717018127441 ], [ "▁adjustable", -11.877938270568848 ], [ "▁Salt", -11.877967834472656 ], [ "Thr", -11.878021240234375 ], [ "▁biseric", -11.878056526184082 ], [ "ieux", -11.87808895111084 ], [ "▁procur", -11.8782377243042 ], [ "▁credits", -11.878250122070312 
], [ "▁Netflix", -11.878585815429688 ], [ "doi", -11.878605842590332 ], [ "▁Jews", -11.878663063049316 ], [ "▁Ukraine", -11.87873363494873 ], [ "▁adevărat", -11.878785133361816 ], [ "▁Apply", -11.878813743591309 ], [ "▁coupons", -11.878859519958496 ], [ "▁Detroit", -11.878881454467773 ], [ "▁rue", -11.878889083862305 ], [ "anumite", -11.878926277160645 ], [ "ished", -11.878973960876465 ], [ "▁withdrawal", -11.87915325164795 ], [ "▁replacing", -11.87917709350586 ], [ "catching", -11.879385948181152 ], [ "▁climbing", -11.879612922668457 ], [ "▁Basic", -11.879770278930664 ], [ "▁inclus", -11.879783630371094 ], [ "scope", -11.879887580871582 ], [ "▁facem", -11.879892349243164 ], [ "▁plec", -11.879904747009277 ], [ "mäßig", -11.879980087280273 ], [ "▁tasty", -11.880064010620117 ], [ "▁tunnel", -11.880074501037598 ], [ "figured", -11.88032341003418 ], [ "gged", -11.880390167236328 ], [ "▁conditii", -11.880599975585938 ], [ "▁homework", -11.880631446838379 ], [ "volle", -11.88063907623291 ], [ "▁Gott", -11.880807876586914 ], [ "▁95", -11.880969047546387 ], [ "▁elect", -11.881020545959473 ], [ "▁blast", -11.881043434143066 ], [ "▁easiest", -11.881248474121094 ], [ "USE", -11.881462097167969 ], [ "concentr", -11.881475448608398 ], [ "orial", -11.881596565246582 ], [ "▁scroll", -11.881638526916504 ], [ "stead", -11.881691932678223 ], [ "▁hormone", -11.881710052490234 ], [ "▁starter", -11.88179874420166 ], [ "▁cald", -11.881878852844238 ], [ "▁wax", -11.881895065307617 ], [ "▁ridic", -11.881900787353516 ], [ "ously", -11.881982803344727 ], [ "maschine", -11.882101058959961 ], [ "licher", -11.882399559020996 ], [ "▁16,", -11.882452964782715 ], [ "▁hassle", -11.882469177246094 ], [ "semnat", -11.882535934448242 ], [ "▁pub", -11.88260555267334 ], [ "240", -11.882800102233887 ], [ "▁kits", -11.882871627807617 ], [ "▁Generation", -11.88293743133545 ], [ "▁merchant", -11.883052825927734 ], [ "▁Erd", -11.883068084716797 ], [ "▁café", -11.883077621459961 ], [ "hoff", -11.88314151763916 ], [ "▁WITH", -11.883376121520996 ], [ "▁gesch", -11.883515357971191 ], [ "▁Editor", -11.883557319641113 ], [ "▁treats", -11.883609771728516 ], [ "▁harsh", -11.883711814880371 ], [ "rome", -11.883729934692383 ], [ "▁Foreign", -11.883928298950195 ], [ "▁denied", -11.883968353271484 ], [ "▁Valentine", -11.884014129638672 ], [ "▁healthier", -11.88408088684082 ], [ "▁readily", -11.884138107299805 ], [ "nac", -11.884190559387207 ], [ "▁intake", -11.884191513061523 ], [ "▁puncte", -11.884230613708496 ], [ "erne", -11.884431838989258 ], [ "file", -11.884668350219727 ], [ "▁continually", -11.884688377380371 ], [ "door", -11.884699821472168 ], [ "▁imediat", -11.884822845458984 ], [ "▁accused", -11.884833335876465 ], [ "chy", -11.884854316711426 ], [ "▁wrapped", -11.884861946105957 ], [ "IES", -11.884878158569336 ], [ "▁terrace", -11.884883880615234 ], [ "mouth", -11.884897232055664 ], [ "▁defensive", -11.884991645812988 ], [ "▁Luci", -11.88508129119873 ], [ "▁significance", -11.885107040405273 ], [ "▁2007,", -11.885213851928711 ], [ "▁inclusion", -11.885221481323242 ], [ "▁rotation", -11.885248184204102 ], [ "hos", -11.885283470153809 ], [ "▁crea", -11.885357856750488 ], [ "üß", -11.885903358459473 ], [ "▁Install", -11.885988235473633 ], [ "▁dump", -11.885998725891113 ], [ "▁informations", -11.886114120483398 ], [ "▁Thi", -11.886117935180664 ], [ "▁85", -11.886252403259277 ], [ "dox", -11.886283874511719 ], [ "track", -11.886436462402344 ], [ "▁couples", -11.886571884155273 ], [ "▁Assembly", -11.886594772338867 ], [ "wagen", 
-11.88672161102295 ], [ "▁Hil", -11.886723518371582 ], [ "ières", -11.886833190917969 ], [ "▁Gabriel", -11.886903762817383 ], [ "▁patience", -11.887053489685059 ], [ "▁colored", -11.887147903442383 ], [ "▁separately", -11.88715934753418 ], [ "▁deployment", -11.887166023254395 ], [ "scape", -11.887306213378906 ], [ "▁Acum", -11.8875150680542 ], [ "▁länger", -11.887518882751465 ], [ "▁screens", -11.887598991394043 ], [ "▁prezenta", -11.887630462646484 ], [ "▁obicei", -11.887638092041016 ], [ "▁crisp", -11.887758255004883 ], [ "▁mechanisms", -11.887771606445312 ], [ "▁thirty", -11.887786865234375 ], [ "▁individually", -11.887989044189453 ], [ "▁internationally", -11.887991905212402 ], [ "lling", -11.888050079345703 ], [ "▁bureau", -11.88843059539795 ], [ "▁erfahren", -11.88844108581543 ], [ "TY", -11.888553619384766 ], [ "PF", -11.888607025146484 ], [ "wid", -11.888752937316895 ], [ "sell", -11.888835906982422 ], [ "▁Luke", -11.888879776000977 ], [ "▁Must", -11.888916969299316 ], [ "▁identical", -11.888927459716797 ], [ "▁Netherlands", -11.888980865478516 ], [ "▁investor", -11.88905143737793 ], [ "▁squad", -11.889073371887207 ], [ "▁21,", -11.889143943786621 ], [ "iko", -11.889230728149414 ], [ "▁departure", -11.88937759399414 ], [ "ega", -11.889384269714355 ], [ "uzi", -11.889408111572266 ], [ "▁lasa", -11.889458656311035 ], [ "bian", -11.889525413513184 ], [ "▁Madrid", -11.889623641967773 ], [ "▁Iowa", -11.889806747436523 ], [ "▁Yellow", -11.890026092529297 ], [ "conom", -11.89004898071289 ], [ "▁hint", -11.890098571777344 ], [ "NOW", -11.890111923217773 ], [ "dress", -11.890204429626465 ], [ "▁Stück", -11.890267372131348 ], [ "echt", -11.890424728393555 ], [ "rial", -11.89045238494873 ], [ "▁Initiative", -11.890474319458008 ], [ "▁magnificent", -11.890474319458008 ], [ "▁pipeline", -11.890543937683105 ], [ "▁08", -11.890806198120117 ], [ "▁écrit", -11.890889167785645 ], [ "KA", -11.891085624694824 ], [ "arile", -11.891151428222656 ], [ "▁unfortunately", -11.891352653503418 ], [ "dose", -11.891355514526367 ], [ "▁counts", -11.891427993774414 ], [ "deciding", -11.891549110412598 ], [ "WA", -11.89167308807373 ], [ "▁doresc", -11.891685485839844 ], [ "NY", -11.892008781433105 ], [ "olin", -11.892112731933594 ], [ "▁Urlaub", -11.892133712768555 ], [ "▁alătur", -11.892317771911621 ], [ "▁Vic", -11.892515182495117 ], [ "▁fier", -11.89269733428955 ], [ "EU", -11.892772674560547 ], [ "▁triple", -11.892871856689453 ], [ "▁compliment", -11.89310359954834 ], [ "▁vegetable", -11.89334487915039 ], [ "member", -11.893743515014648 ], [ "atiei", -11.893793106079102 ], [ "▁toxic", -11.893835067749023 ], [ "▁converted", -11.893888473510742 ], [ "▁Pink", -11.893999099731445 ], [ "▁fragment", -11.894020080566406 ], [ "presenting", -11.894027709960938 ], [ "▁garantie", -11.894031524658203 ], [ "▁31,", -11.894052505493164 ], [ "▁puisqu", -11.894105911254883 ], [ "aching", -11.894107818603516 ], [ "▁Shan", -11.894119262695312 ], [ "▁Affairs", -11.894368171691895 ], [ "üsse", -11.894405364990234 ], [ "▁CBD", -11.894428253173828 ], [ "▁quatre", -11.894588470458984 ], [ "▁horror", -11.894651412963867 ], [ "▁culoare", -11.894661903381348 ], [ "▁welcoming", -11.894673347473145 ], [ "▁headache", -11.894808769226074 ], [ "▁septembre", -11.894820213317871 ], [ "▁Tür", -11.894862174987793 ], [ "lateral", -11.89507007598877 ], [ "▁termin", -11.895228385925293 ], [ "▁Aid", -11.895291328430176 ], [ "second", -11.895308494567871 ], [ "▁Philip", -11.895310401916504 ], [ "berries", -11.895347595214844 ], [ "▁Slot", 
-11.895431518554688 ], [ "ка", -11.895442962646484 ], [ "▁consecutive", -11.895590782165527 ], [ "value", -11.895705223083496 ], [ "▁islands", -11.8958101272583 ], [ "▁posibilitatea", -11.895928382873535 ], [ "0.5", -11.896341323852539 ], [ "▁Dumpster", -11.896471977233887 ], [ "▁Gran", -11.89647388458252 ], [ "▁restricted", -11.8967924118042 ], [ "▁discussing", -11.896921157836914 ], [ "cock", -11.896966934204102 ], [ "Serie", -11.896989822387695 ], [ "▁crushing", -11.896998405456543 ], [ "RB", -11.897034645080566 ], [ "▁Gy", -11.897068977355957 ], [ "normal", -11.897098541259766 ], [ "DT", -11.897180557250977 ], [ "▁concurs", -11.897181510925293 ], [ "▁Beratung", -11.897231101989746 ], [ "▁handful", -11.897235870361328 ], [ "▁loading", -11.897237777709961 ], [ "▁WI", -11.897269248962402 ], [ "▁Fitness", -11.897283554077148 ], [ "▁RAM", -11.897302627563477 ], [ "▁Twi", -11.89730453491211 ], [ "adurch", -11.897345542907715 ], [ "▁obiectiv", -11.897366523742676 ], [ "BM", -11.897635459899902 ], [ "▁amendment", -11.8976469039917 ], [ "whi", -11.897652626037598 ], [ "▁Besonder", -11.897871017456055 ], [ "ALL", -11.898003578186035 ], [ "▁earning", -11.898090362548828 ], [ "▁nutrients", -11.898580551147461 ], [ "pru", -11.898633003234863 ], [ "▁offensive", -11.898696899414062 ], [ "▁shelves", -11.898711204528809 ], [ "▁încâ", -11.898726463317871 ], [ "▁execute", -11.898923873901367 ], [ "▁cauz", -11.898966789245605 ], [ "exist", -11.899179458618164 ], [ "▁Meter", -11.899191856384277 ], [ "there", -11.899201393127441 ], [ "▁réaliser", -11.899249076843262 ], [ "blog", -11.899362564086914 ], [ "▁résultats", -11.89937973022461 ], [ "baren", -11.899391174316406 ], [ "▁lang", -11.899425506591797 ], [ "▁mere", -11.899870872497559 ], [ "▁toti", -11.900079727172852 ], [ "DN", -11.90017032623291 ], [ "Hi", -11.900310516357422 ], [ "▁merg", -11.900359153747559 ], [ "▁Camera", -11.90054988861084 ], [ "▁parfum", -11.900697708129883 ], [ "CG", -11.900701522827148 ], [ "posed", -11.900713920593262 ], [ "▁proposals", -11.900732040405273 ], [ "▁incorrect", -11.900811195373535 ], [ "▁Denver", -11.901168823242188 ], [ "▁noapte", -11.901397705078125 ], [ "▁VPN", -11.901436805725098 ], [ "▁Oklahoma", -11.90159797668457 ], [ "horizon", -11.901647567749023 ], [ "▁villa", -11.901668548583984 ], [ "duce", -11.901812553405762 ], [ "Dienst", -11.902042388916016 ], [ "▁oversee", -11.902511596679688 ], [ "astr", -11.902548789978027 ], [ "brand", -11.902713775634766 ], [ "▁Safe", -11.902746200561523 ], [ "▁competing", -11.902812004089355 ], [ "▁subiect", -11.902812004089355 ], [ "▁équipe", -11.903091430664062 ], [ "▁Dress", -11.903095245361328 ], [ "▁Juni", -11.903139114379883 ], [ "▁repeated", -11.90317153930664 ], [ "2012", -11.903226852416992 ], [ "▁départ", -11.903234481811523 ], [ "immer", -11.903335571289062 ], [ "▁mondial", -11.903374671936035 ], [ "▁datelor", -11.903703689575195 ], [ "▁surgeon", -11.903782844543457 ], [ "▁demanding", -11.903812408447266 ], [ "▁concluded", -11.903878211975098 ], [ "țiile", -11.903950691223145 ], [ "marin", -11.903999328613281 ], [ "▁estim", -11.904206275939941 ], [ "▁Loan", -11.904361724853516 ], [ "sculpt", -11.904373168945312 ], [ "▁99", -11.904391288757324 ], [ "void", -11.904400825500488 ], [ "▁Empire", -11.904499053955078 ], [ "▁Brit", -11.90450382232666 ], [ "▁véhicule", -11.904777526855469 ], [ "▁dividend", -11.905069351196289 ], [ "▁refused", -11.905077934265137 ], [ "▁speaks", -11.905156135559082 ], [ "▁Morris", -11.905282020568848 ], [ "dict", -11.905349731445312 ], [ 
"▁funeral", -11.905556678771973 ], [ "▁Behandlung", -11.905763626098633 ], [ "▁Revolution", -11.905905723571777 ], [ "▁Sum", -11.905935287475586 ], [ "einigen", -11.906030654907227 ], [ "RES", -11.906070709228516 ], [ "▁vite", -11.906071662902832 ], [ "▁Captain", -11.906190872192383 ], [ "▁assurance", -11.9061918258667 ], [ "uga", -11.906500816345215 ], [ "▁conserv", -11.906583786010742 ], [ "▁therapeutic", -11.906641006469727 ], [ "▁Sweden", -11.906753540039062 ], [ "▁Lead", -11.906888961791992 ], [ "ément", -11.907071113586426 ], [ "▁53", -11.90709114074707 ], [ "▁fraction", -11.9071683883667 ], [ "▁magnet", -11.907170295715332 ], [ "assurer", -11.907184600830078 ], [ "▁Steuer", -11.90733814239502 ], [ "▁flori", -11.90735149383545 ], [ "▁charming", -11.907588958740234 ], [ "▁athletic", -11.907621383666992 ], [ "▁membri", -11.907706260681152 ], [ "▁Sep", -11.907726287841797 ], [ "ogue", -11.907800674438477 ], [ "▁familie", -11.907800674438477 ], [ "▁SW", -11.90796947479248 ], [ "▁diagnosed", -11.908023834228516 ], [ "RR", -11.908143997192383 ], [ "▁Fern", -11.908233642578125 ], [ "▁rational", -11.908281326293945 ], [ "▁talents", -11.90828800201416 ], [ "ziert", -11.908317565917969 ], [ "▁chemin", -11.908459663391113 ], [ "sheet", -11.908562660217285 ], [ "▁outer", -11.908565521240234 ], [ "▁Kap", -11.908591270446777 ], [ "▁HERE", -11.908656120300293 ], [ "▁uman", -11.908824920654297 ], [ "▁accompany", -11.908880233764648 ], [ "▁varieties", -11.908881187438965 ], [ "▁sensors", -11.908957481384277 ], [ "▁25%", -11.90919017791748 ], [ "▁tray", -11.909354209899902 ], [ "▁critique", -11.909459114074707 ], [ "▁puţin", -11.909515380859375 ], [ "▁Schüler", -11.90953540802002 ], [ "▁repar", -11.909744262695312 ], [ "▁overlook", -11.909931182861328 ], [ "▁surf", -11.910048484802246 ], [ "▁tasting", -11.910118103027344 ], [ "bog", -11.91027545928955 ], [ "▁Payment", -11.910289764404297 ], [ "▁Helen", -11.91049575805664 ], [ "▁Refer", -11.910694122314453 ], [ "application", -11.910698890686035 ], [ "lection", -11.910856246948242 ], [ "▁avril", -11.911042213439941 ], [ "▁Grace", -11.911109924316406 ], [ "▁kau", -11.911274909973145 ], [ "▁libraries", -11.911319732666016 ], [ "▁closest", -11.911347389221191 ], [ "▁coating", -11.911351203918457 ], [ "▁suicide", -11.911364555358887 ], [ "▁undergraduate", -11.911449432373047 ], [ "▁stitch", -11.91149616241455 ], [ "▁reset", -11.911593437194824 ], [ "▁Greece", -11.911626815795898 ], [ "▁Fred", -11.91197681427002 ], [ "▁18.", -11.912047386169434 ], [ "▁nuit", -11.912087440490723 ], [ "▁lying", -11.912199974060059 ], [ "▁cottage", -11.91232681274414 ], [ "bone", -11.912477493286133 ], [ "▁milieu", -11.912480354309082 ], [ "management", -11.912623405456543 ], [ "▁Freund", -11.912724494934082 ], [ "▁specially", -11.912841796875 ], [ "veut", -11.912961959838867 ], [ "▁necesare", -11.912999153137207 ], [ "▁cert", -11.913081169128418 ], [ "articul", -11.913151741027832 ], [ "150", -11.913174629211426 ], [ "rounded", -11.913180351257324 ], [ "▁longue", -11.913193702697754 ], [ "▁Quel", -11.913240432739258 ], [ "Until", -11.913322448730469 ], [ "▁700", -11.913398742675781 ], [ "▁installations", -11.913423538208008 ], [ "▁boats", -11.913467407226562 ], [ "Fig", -11.913609504699707 ], [ "▁cocktail", -11.913613319396973 ], [ "▁rocks", -11.91366958618164 ], [ "meinen", -11.91374683380127 ], [ "entrepreneur", -11.913780212402344 ], [ "schwarz", -11.913924217224121 ], [ "▁diesel", -11.91392993927002 ], [ "▁villages", -11.913969039916992 ], [ "▁cups", -11.914076805114746 
], [ "▁stairs", -11.914241790771484 ], [ "▁Match", -11.914350509643555 ], [ "Taking", -11.914437294006348 ], [ "prin", -11.914469718933105 ], [ "▁penal", -11.91472053527832 ], [ "partner", -11.914867401123047 ], [ "wave", -11.91497802734375 ], [ "▁baie", -11.91515064239502 ], [ "LAN", -11.915151596069336 ], [ "fix", -11.915202140808105 ], [ "▁surveillance", -11.915295600891113 ], [ "▁Register", -11.915343284606934 ], [ "oara", -11.915536880493164 ], [ "▁Phoenix", -11.915602684020996 ], [ "aktuellen", -11.915613174438477 ], [ "▁livres", -11.915618896484375 ], [ "▁entities", -11.916102409362793 ], [ "▁Regard", -11.916112899780273 ], [ "▁Jazz", -11.91614055633545 ], [ "▁flame", -11.91616153717041 ], [ "▁independence", -11.916215896606445 ], [ "▁Adventure", -11.916341781616211 ], [ "▁assign", -11.916399955749512 ], [ "▁Adult", -11.916579246520996 ], [ "kehr", -11.916666984558105 ], [ "▁ordering", -11.916850090026855 ], [ "▁charts", -11.91687297821045 ], [ "▁Român", -11.916936874389648 ], [ "bauen", -11.916982650756836 ], [ "▁Floor", -11.917065620422363 ], [ "▁Meet", -11.917101860046387 ], [ "▁compromise", -11.917158126831055 ], [ "regarded", -11.917171478271484 ], [ "02.", -11.917215347290039 ], [ "▁granite", -11.917299270629883 ], [ "▁Judge", -11.917314529418945 ], [ "opti", -11.917373657226562 ], [ "liste", -11.917379379272461 ], [ "▁capacité", -11.917427062988281 ], [ "▁criticism", -11.917450904846191 ], [ "LES", -11.918198585510254 ], [ "▁Century", -11.918211936950684 ], [ "▁mobility", -11.918252944946289 ], [ "▁variation", -11.918622016906738 ], [ "▁Utah", -11.91867446899414 ], [ "▁seminar", -11.918678283691406 ], [ "▁experiments", -11.918803215026855 ], [ "midst", -11.918943405151367 ], [ "▁Psycho", -11.919002532958984 ], [ "▁choses", -11.919121742248535 ], [ "▁Karl", -11.919175148010254 ], [ "▁ruling", -11.919286727905273 ], [ "▁Voice", -11.919404983520508 ], [ "▁împotriv", -11.919442176818848 ], [ "▁mesaj", -11.919500350952148 ], [ "▁vrei", -11.919594764709473 ], [ "fan", -11.919601440429688 ], [ "parent", -11.919648170471191 ], [ "▁oraș", -11.919770240783691 ], [ "▁printable", -11.919777870178223 ], [ "▁diver", -11.919859886169434 ], [ "▁ochi", -11.919949531555176 ], [ "▁teenager", -11.920125961303711 ], [ "▁Death", -11.920150756835938 ], [ "▁manque", -11.920289993286133 ], [ "ască", -11.920345306396484 ], [ "▁prob", -11.9203519821167 ], [ "▁télé", -11.920354843139648 ], [ "cursul", -11.920378684997559 ], [ "pion", -11.92052173614502 ], [ "▁dedication", -11.920644760131836 ], [ "▁opr", -11.920687675476074 ], [ "führung", -11.920761108398438 ], [ "▁cognitive", -11.920827865600586 ], [ "soft", -11.920868873596191 ], [ "▁19,", -11.9209623336792 ], [ "▁24-", -11.921197891235352 ], [ "▁legitimate", -11.921220779418945 ], [ "▁comedy", -11.921277046203613 ], [ "▁violation", -11.921327590942383 ], [ "▁disposal", -11.921472549438477 ], [ "▁liegen", -11.921605110168457 ], [ "ко", -11.921878814697266 ], [ "▁martie", -11.921931266784668 ], [ "▁Vas", -11.92212200164795 ], [ "rash", -11.922134399414062 ], [ "▁hadn", -11.922174453735352 ], [ "▁connu", -11.922204971313477 ], [ "▁regelmäßig", -11.922216415405273 ], [ "▁Webseite", -11.922224998474121 ], [ "▁failing", -11.922273635864258 ], [ "explique", -11.922449111938477 ], [ "▁Player", -11.922513961791992 ], [ "vul", -11.922560691833496 ], [ "camp", -11.922992706298828 ], [ "▁erreicht", -11.922996520996094 ], [ "▁tags", -11.922998428344727 ], [ "▁headline", -11.923210144042969 ], [ "▁banc", -11.923253059387207 ], [ "▁Mayor", -11.923309326171875 ], [ 
"trop", -11.923395156860352 ], [ "AK", -11.9235258102417 ], [ "▁lighter", -11.923602104187012 ], [ "▁syndrome", -11.923604965209961 ], [ "▁Adrian", -11.92365550994873 ], [ "▁EUR", -11.923759460449219 ], [ "▁Missouri", -11.923916816711426 ], [ "▁Chan", -11.924108505249023 ], [ "topped", -11.924233436584473 ], [ "▁nationwide", -11.924276351928711 ], [ "▁6-", -11.924302101135254 ], [ "final", -11.924408912658691 ], [ "ttes", -11.924485206604004 ], [ "▁FO", -11.924537658691406 ], [ "▁legi", -11.924556732177734 ], [ "▁Hum", -11.924575805664062 ], [ "vita", -11.924662590026855 ], [ "▁Regen", -11.924695014953613 ], [ "▁confusion", -11.92498779296875 ], [ "▁valori", -11.925142288208008 ], [ "mill", -11.92516803741455 ], [ "did", -11.925237655639648 ], [ "pid", -11.925253868103027 ], [ "▁implications", -11.925284385681152 ], [ "▁Value", -11.92552375793457 ], [ "lângă", -11.925666809082031 ], [ "▁véritable", -11.92577075958252 ], [ "▁Stick", -11.925814628601074 ], [ "zol", -11.925835609436035 ], [ "▁ebenso", -11.925863265991211 ], [ "west", -11.925895690917969 ], [ "▁auszu", -11.92600154876709 ], [ "▁adorable", -11.926016807556152 ], [ "▁clarity", -11.92605209350586 ], [ "▁Wash", -11.926335334777832 ], [ "▁alien", -11.926423072814941 ], [ "usement", -11.926626205444336 ], [ "▁bones", -11.9266357421875 ], [ "▁Beau", -11.926726341247559 ], [ "▁Jet", -11.926727294921875 ], [ "▁visibility", -11.927034378051758 ], [ "impose", -11.927063941955566 ], [ "food", -11.927133560180664 ], [ "▁duce", -11.927361488342285 ], [ "▁Format", -11.927386283874512 ], [ "▁durability", -11.927424430847168 ], [ "▁Prim", -11.927614212036133 ], [ "▁mele", -11.927629470825195 ], [ "▁dürfen", -11.927631378173828 ], [ "▁Angebote", -11.92765998840332 ], [ "▁discharge", -11.927745819091797 ], [ "▁Justin", -11.928055763244629 ], [ "▁shame", -11.928228378295898 ], [ "▁heated", -11.928282737731934 ], [ "ères", -11.92856216430664 ], [ "human", -11.928810119628906 ], [ "4.5", -11.928831100463867 ], [ "▁lien", -11.928955078125 ], [ "▁Alan", -11.92896556854248 ], [ "▁transmis", -11.929130554199219 ], [ "▁Bul", -11.929137229919434 ], [ "plu", -11.929169654846191 ], [ "acul", -11.929337501525879 ], [ "merk", -11.929434776306152 ], [ "▁altfel", -11.929566383361816 ], [ "deli", -11.929689407348633 ], [ "▁Cru", -11.930001258850098 ], [ "▁hommes", -11.930127143859863 ], [ "aurait", -11.930137634277344 ], [ "cca", -11.930187225341797 ], [ "▁Path", -11.930208206176758 ], [ "astronom", -11.930241584777832 ], [ "▁détail", -11.930276870727539 ], [ "▁blocked", -11.930394172668457 ], [ "iding", -11.93044376373291 ], [ "schä", -11.930500030517578 ], [ "▁30-", -11.930624008178711 ], [ "diction", -11.930813789367676 ], [ "▁pulling", -11.930868148803711 ], [ "▁Sample", -11.930924415588379 ], [ "▁renewable", -11.930997848510742 ], [ "▁Pinterest", -11.93106746673584 ], [ "▁Tages", -11.93106746673584 ], [ "▁shed", -11.931171417236328 ], [ "▁hart", -11.931188583374023 ], [ "▁serie", -11.931200981140137 ], [ "▁documentary", -11.931208610534668 ], [ "gebaut", -11.931220054626465 ], [ "▁Hause", -11.931272506713867 ], [ "share", -11.931303977966309 ], [ "▁inflation", -11.93138599395752 ], [ "▁gall", -11.931504249572754 ], [ "▁adjacent", -11.931673049926758 ], [ "jer", -11.93173885345459 ], [ "▁Universal", -11.931946754455566 ], [ "▁disabilities", -11.931984901428223 ], [ "▁proposition", -11.93204116821289 ], [ "Work", -11.932293891906738 ], [ "▁closure", -11.932306289672852 ], [ "▁separated", -11.932496070861816 ], [ "▁soda", -11.932549476623535 ], [ "▁elite", 
-11.93263053894043 ], [ "appro", -11.93265438079834 ], [ "▁acute", -11.93266487121582 ], [ "utton", -11.932938575744629 ], [ "▁facă", -11.933053016662598 ], [ "▁collector", -11.933121681213379 ], [ "▁unlock", -11.933249473571777 ], [ "▁Alpha", -11.933267593383789 ], [ "▁Used", -11.933267593383789 ], [ "▁applicants", -11.933302879333496 ], [ "▁înseamn", -11.933387756347656 ], [ "▁inclu", -11.933414459228516 ], [ "▁disclosure", -11.933544158935547 ], [ "▁Fahr", -11.933995246887207 ], [ "AST", -11.934061050415039 ], [ "▁vivre", -11.934069633483887 ], [ "»,", -11.934167861938477 ], [ "laud", -11.93430233001709 ], [ "▁soir", -11.934365272521973 ], [ "▁barrier", -11.934405326843262 ], [ "înd", -11.934470176696777 ], [ "▁ambition", -11.93451976776123 ], [ "asta", -11.934550285339355 ], [ "occupied", -11.934747695922852 ], [ "▁Gau", -11.934774398803711 ], [ "four", -11.93481159210205 ], [ "▁nap", -11.934887886047363 ], [ "iez", -11.934922218322754 ], [ "endra", -11.935242652893066 ], [ "gaben", -11.935464859008789 ], [ "▁Carol", -11.935481071472168 ], [ "▁Switzerland", -11.935575485229492 ], [ "▁Bond", -11.935617446899414 ], [ "▁crossing", -11.935630798339844 ], [ "▁Palace", -11.9359769821167 ], [ "NG", -11.935986518859863 ], [ "▁Budget", -11.93622875213623 ], [ "▁lid", -11.936372756958008 ], [ "bab", -11.936393737792969 ], [ "▁polish", -11.936416625976562 ], [ "▁herbs", -11.93673038482666 ], [ "▁dear", -11.936747550964355 ], [ "▁devrai", -11.936846733093262 ], [ "walk", -11.936864852905273 ], [ "▁humanity", -11.936897277832031 ], [ "▁tires", -11.936978340148926 ], [ "égal", -11.936994552612305 ], [ "▁bow", -11.937032699584961 ], [ "▁debris", -11.937201499938965 ], [ "▁keywords", -11.937273025512695 ], [ "irk", -11.937345504760742 ], [ "▁suspend", -11.937360763549805 ], [ "▁pourra", -11.93738079071045 ], [ "migran", -11.937454223632812 ], [ "thereby", -11.937570571899414 ], [ "▁Harris", -11.937943458557129 ], [ "ateurs", -11.937956809997559 ], [ "▁fal", -11.938271522521973 ], [ "alleged", -11.938355445861816 ], [ "noch", -11.938494682312012 ], [ "▁observation", -11.938506126403809 ], [ "▁București", -11.93855094909668 ], [ "▁SQL", -11.938624382019043 ], [ "▁Phase", -11.938760757446289 ], [ "▁adventures", -11.93881607055664 ], [ "▁Kol", -11.938885688781738 ], [ "▁professionnel", -11.938916206359863 ], [ "crit", -11.939026832580566 ], [ "LR", -11.939313888549805 ], [ "▁preview", -11.939464569091797 ], [ "▁highlighted", -11.939942359924316 ], [ "▁Stud", -11.939949035644531 ], [ "▁labour", -11.939956665039062 ], [ "MV", -11.9399995803833 ], [ "click", -11.940049171447754 ], [ "approche", -11.94016170501709 ], [ "tian", -11.940183639526367 ], [ "cité", -11.940192222595215 ], [ "▁Rain", -11.94028377532959 ], [ "typ", -11.94032096862793 ], [ "Usually", -11.940435409545898 ], [ "▁outlet", -11.940513610839844 ], [ "logging", -11.940814018249512 ], [ "▁Temperatur", -11.940906524658203 ], [ "▁Scottish", -11.94090747833252 ], [ "iga", -11.940942764282227 ], [ "▁glory", -11.941086769104004 ], [ "▁Rom", -11.941242218017578 ], [ "zeug", -11.941337585449219 ], [ "establishing", -11.941339492797852 ], [ "▁imaging", -11.941926002502441 ], [ "▁Beauty", -11.942015647888184 ], [ "igan", -11.942042350769043 ], [ "après", -11.94224739074707 ], [ "Adresse", -11.942267417907715 ], [ "cliff", -11.942349433898926 ], [ "▁unnecessary", -11.943267822265625 ], [ "▁slim", -11.943324089050293 ], [ "dir", -11.943490982055664 ], [ "▁leisure", -11.943660736083984 ], [ "▁principale", -11.94368839263916 ], [ "▁Viele", 
-11.943770408630371 ], [ "▁2007.", -11.943802833557129 ], [ "Hopefully", -11.943829536437988 ], [ "cola", -11.943851470947266 ], [ "▁Planet", -11.943927764892578 ], [ "▁orientation", -11.943933486938477 ], [ "▁angry", -11.94419002532959 ], [ "MIT", -11.944234848022461 ], [ "▁Kenya", -11.944265365600586 ], [ "▁bless", -11.94435977935791 ], [ "▁Fill", -11.944524765014648 ], [ "▁compar", -11.944664001464844 ], [ "▁curtain", -11.94473934173584 ], [ "ţei", -11.944754600524902 ], [ "▁Az", -11.94482421875 ], [ "▁Rang", -11.944908142089844 ], [ "▁dominant", -11.944974899291992 ], [ "race", -11.944985389709473 ], [ "▁Target", -11.944987297058105 ], [ "▁manually", -11.944987297058105 ], [ "objet", -11.945024490356445 ], [ "thrown", -11.945131301879883 ], [ "NF", -11.945149421691895 ], [ "durant", -11.945185661315918 ], [ "rect", -11.945302963256836 ], [ "▁Größe", -11.945320129394531 ], [ "VM", -11.9453763961792 ], [ "▁aprilie", -11.945476531982422 ], [ "▁Welche", -11.945639610290527 ], [ "▁verde", -11.946157455444336 ], [ "▁Portugal", -11.946266174316406 ], [ "▁algorithm", -11.94627571105957 ], [ "ăț", -11.946328163146973 ], [ "▁Grey", -11.946371078491211 ], [ "▁cleaned", -11.94644832611084 ], [ "▁modes", -11.946463584899902 ], [ "▁relaxation", -11.946599006652832 ], [ "mbr", -11.946786880493164 ], [ "étique", -11.946821212768555 ], [ "Her", -11.946904182434082 ], [ "▁beta", -11.946952819824219 ], [ "▁nobody", -11.94699764251709 ], [ "▁aplic", -11.947060585021973 ], [ "present", -11.947080612182617 ], [ "emis", -11.947197914123535 ], [ "éléments", -11.947257995605469 ], [ "▁lately", -11.947303771972656 ], [ "fab", -11.94732666015625 ], [ "▁aluminiu", -11.947373390197754 ], [ "▁vest", -11.947524070739746 ], [ "▁statue", -11.947558403015137 ], [ "▁publice", -11.947586059570312 ], [ "▁merchandise", -11.9476900100708 ], [ "▁relat", -11.947810173034668 ], [ "git", -11.94796371459961 ], [ "▁interne", -11.948281288146973 ], [ "▁Tokyo", -11.948325157165527 ], [ "chal", -11.948348045349121 ], [ "contacted", -11.948430061340332 ], [ "▁tras", -11.948455810546875 ], [ "▁Clinic", -11.948626518249512 ], [ "▁unbe", -11.948633193969727 ], [ "▁dumneavoastra", -11.948798179626465 ], [ "float", -11.949078559875488 ], [ "isson", -11.94909381866455 ], [ "▁vessel", -11.949126243591309 ], [ "attempting", -11.949161529541016 ], [ "▁doute", -11.94918441772461 ], [ "▁Leadership", -11.949322700500488 ], [ "▁sustain", -11.94947338104248 ], [ "▁textile", -11.949666023254395 ], [ "auer", -11.949702262878418 ], [ "▁90%", -11.949899673461914 ], [ "garten", -11.949911117553711 ], [ "▁adauga", -11.949991226196289 ], [ "▁Kil", -11.950061798095703 ], [ "▁troops", -11.950420379638672 ], [ "▁pale", -11.950568199157715 ], [ "host", -11.950743675231934 ], [ "▁cry", -11.950757026672363 ], [ "▁Alb", -11.950793266296387 ], [ "▁Brad", -11.95089340209961 ], [ "▁bicycle", -11.951054573059082 ], [ "▁24/7", -11.951217651367188 ], [ "▁с", -11.951228141784668 ], [ "▁stimul", -11.951401710510254 ], [ "gler", -11.951445579528809 ], [ "▁notwendig", -11.951496124267578 ], [ "▁cousin", -11.95158863067627 ], [ "cheie", -11.951600074768066 ], [ "hay", -11.951751708984375 ], [ "▁rezolv", -11.952134132385254 ], [ "▁THIS", -11.952143669128418 ], [ "ordre", -11.952157974243164 ], [ "iști", -11.952173233032227 ], [ "▁conclude", -11.952310562133789 ], [ "▁Lage", -11.952327728271484 ], [ "▁Entertainment", -11.952454566955566 ], [ "▁valued", -11.952478408813477 ], [ "ktion", -11.95253849029541 ], [ "▁priorities", -11.95268440246582 ], [ "▁1986", 
-11.952770233154297 ], [ "▁fatal", -11.952934265136719 ], [ "▁accurately", -11.952988624572754 ], [ "▁1987", -11.953022956848145 ], [ "▁folk", -11.953073501586914 ], [ "7)", -11.953163146972656 ], [ "führer", -11.95360279083252 ], [ "▁knot", -11.953612327575684 ], [ "haltung", -11.953720092773438 ], [ "▁Charlie", -11.953733444213867 ], [ "âge", -11.95376205444336 ], [ "▁threshold", -11.954041481018066 ], [ "▁assault", -11.954130172729492 ], [ "▁meist", -11.954141616821289 ], [ "bine", -11.954155921936035 ], [ "surprisingly", -11.954171180725098 ], [ "▁Protect", -11.954180717468262 ], [ "▁Hack", -11.954258918762207 ], [ "▁Quant", -11.954537391662598 ], [ "▁Cet", -11.954782485961914 ], [ "▁convinced", -11.95481014251709 ], [ "▁muncă", -11.955033302307129 ], [ "dging", -11.955066680908203 ], [ "▁Millionen", -11.955129623413086 ], [ "zahlung", -11.955148696899414 ], [ "▁anticipated", -11.955192565917969 ], [ "▁brass", -11.9552001953125 ], [ "KO", -11.955244064331055 ], [ "▁culori", -11.955286979675293 ], [ "▁Aero", -11.955326080322266 ], [ "▁intermediu", -11.955373764038086 ], [ "▁Philippines", -11.955381393432617 ], [ "▁jury", -11.955387115478516 ], [ "▁Funktion", -11.95569896697998 ], [ "▁probe", -11.955704689025879 ], [ "TL", -11.955748558044434 ], [ "1.0", -11.955804824829102 ], [ "ELL", -11.95581340789795 ], [ "She", -11.956001281738281 ], [ "▁Blood", -11.956073760986328 ], [ "▁Dean", -11.956111907958984 ], [ "▁scène", -11.9561185836792 ], [ "volu", -11.95621395111084 ], [ "▁Epi", -11.95621395111084 ], [ "▁séjour", -11.95627498626709 ], [ "▁Smartphone", -11.956306457519531 ], [ "▁fired", -11.956357955932617 ], [ "beat", -11.95650577545166 ], [ "▁pockets", -11.956506729125977 ], [ "▁serviciu", -11.956624031066895 ], [ "▁affairs", -11.95678424835205 ], [ "▁Ry", -11.956842422485352 ], [ "▁Stadium", -11.956954956054688 ], [ "▁snacks", -11.957182884216309 ], [ "▁efectu", -11.957221031188965 ], [ "▁Richtung", -11.957273483276367 ], [ "▁dresses", -11.957352638244629 ], [ "▁Medien", -11.95744800567627 ], [ "writer", -11.95759105682373 ], [ "changing", -11.957655906677246 ], [ "▁supportive", -11.957849502563477 ], [ "▁beneath", -11.957873344421387 ], [ "paid", -11.958078384399414 ], [ "▁customize", -11.958155632019043 ], [ "▁Ferr", -11.958187103271484 ], [ "reaches", -11.958338737487793 ], [ "arma", -11.958401679992676 ], [ "ción", -11.958598136901855 ], [ "▁elderly", -11.959243774414062 ], [ "▁modification", -11.95934009552002 ], [ "▁perfection", -11.959381103515625 ], [ "▁Allow", -11.959492683410645 ], [ "▁belonging", -11.959542274475098 ], [ "▁compound", -11.959589004516602 ], [ "▁Results", -11.959681510925293 ], [ "▁astăzi", -11.959793090820312 ], [ "▁Liber", -11.959818840026855 ], [ "jor", -11.959850311279297 ], [ "▁Nin", -11.959980964660645 ], [ "▁lumina", -11.959992408752441 ], [ "▁130", -11.960073471069336 ], [ "▁Platform", -11.960121154785156 ], [ "▁SMS", -11.960221290588379 ], [ "▁medic", -11.96024227142334 ], [ "hör", -11.960315704345703 ], [ "▁Kas", -11.96038818359375 ], [ "▁tomato", -11.960403442382812 ], [ "▁logiciel", -11.960505485534668 ], [ "php", -11.960654258728027 ], [ "▁premises", -11.96071720123291 ], [ "▁Communication", -11.96072769165039 ], [ "▁reprezintă", -11.960762023925781 ], [ "▁Partners", -11.960866928100586 ], [ "▁RV", -11.961090087890625 ], [ "▁pants", -11.961197853088379 ], [ "▁envie", -11.961256980895996 ], [ "▁commerce", -11.961263656616211 ], [ "▁tears", -11.961298942565918 ], [ "▁cooler", -11.961494445800781 ], [ "strand", -11.961556434631348 ], [ "▁Gil", 
-11.961588859558105 ], [ "▁référence", -11.961641311645508 ], [ "▁electronics", -11.961681365966797 ], [ "exposition", -11.961700439453125 ], [ "▁Caribbean", -11.96171760559082 ], [ "▁compelling", -11.96171760559082 ], [ "luci", -11.961723327636719 ], [ "▁Brooklyn", -11.961892127990723 ], [ "▁Thai", -11.961950302124023 ], [ "dler", -11.96198844909668 ], [ "▁supra", -11.962016105651855 ], [ "centered", -11.962026596069336 ], [ "▁metro", -11.962081909179688 ], [ "▁03", -11.962299346923828 ], [ "▁enrich", -11.962437629699707 ], [ "▁adevarat", -11.962594985961914 ], [ "5000", -11.962961196899414 ], [ "▁bell", -11.96297550201416 ], [ "▁sine", -11.962996482849121 ], [ "▁appealing", -11.963088989257812 ], [ "clam", -11.963116645812988 ], [ "▁vorhanden", -11.963165283203125 ], [ "▁pickup", -11.963268280029297 ], [ "▁Alaska", -11.963269233703613 ], [ "▁Nacht", -11.963300704956055 ], [ "borough", -11.9633207321167 ], [ "▁Blanc", -11.96340274810791 ], [ "▁apare", -11.963616371154785 ], [ "▁Works", -11.963798522949219 ], [ "mettent", -11.963801383972168 ], [ "atter", -11.96389389038086 ], [ "terra", -11.963946342468262 ], [ "▁Bit", -11.964105606079102 ], [ "RL", -11.964131355285645 ], [ "▁Wander", -11.964262962341309 ], [ "▁Hawk", -11.964595794677734 ], [ "▁Probleme", -11.964665412902832 ], [ "regel", -11.964729309082031 ], [ "hne", -11.964739799499512 ], [ "fass", -11.96486759185791 ], [ "▁Andy", -11.965014457702637 ], [ "▁befinde", -11.965179443359375 ], [ "boo", -11.965265274047852 ], [ "▁connectivity", -11.965304374694824 ], [ "▁spielt", -11.965418815612793 ], [ "zweiten", -11.96547794342041 ], [ "ţilor", -11.965526580810547 ], [ "▁confi", -11.96561336517334 ], [ "▁schlecht", -11.965773582458496 ], [ "▁Beginn", -11.96581745147705 ], [ "▁floating", -11.965903282165527 ], [ "nimmt", -11.966071128845215 ], [ "▁arbeiten", -11.96611213684082 ], [ "pillar", -11.966131210327148 ], [ "sterreich", -11.966347694396973 ], [ "▁Schule", -11.966446876525879 ], [ "▁durée", -11.966521263122559 ], [ "▁honestly", -11.96653938293457 ], [ "▁acel", -11.9666166305542 ], [ "▁Prozess", -11.96662425994873 ], [ "Min", -11.966629028320312 ], [ "enii", -11.966632843017578 ], [ "DAY", -11.966758728027344 ], [ "▁Blo", -11.966806411743164 ], [ "▁bolt", -11.966946601867676 ], [ "sicher", -11.967070579528809 ], [ "▁17,", -11.967122077941895 ], [ "▁anchor", -11.967215538024902 ], [ "▁consistency", -11.967241287231445 ], [ "▁relatives", -11.967263221740723 ], [ "▁lac", -11.967385292053223 ], [ "105", -11.967432975769043 ], [ "▁Craig", -11.967534065246582 ], [ "▁mandate", -11.967598915100098 ], [ "▁bedeutet", -11.967674255371094 ], [ "▁Soviet", -11.967680931091309 ], [ "▁arguments", -11.967938423156738 ], [ "▁Gebäude", -11.967997550964355 ], [ "▁Parliament", -11.968005180358887 ], [ "▁Kha", -11.968087196350098 ], [ "nica", -11.968130111694336 ], [ "▁Amazing", -11.968162536621094 ], [ "gründe", -11.968179702758789 ], [ "▁Ott", -11.968269348144531 ], [ "Exp", -11.968314170837402 ], [ "▁ianuarie", -11.96848201751709 ], [ "riot", -11.968571662902832 ], [ "▁futur", -11.968626976013184 ], [ "▁Honda", -11.968647956848145 ], [ "!!!!", -11.96865177154541 ], [ "▁citit", -11.968689918518066 ], [ "▁22,", -11.968708992004395 ], [ "țional", -11.968711853027344 ], [ "▁lovers", -11.968732833862305 ], [ "▁Current", -11.968835830688477 ], [ "▁drone", -11.96927261352539 ], [ "▁promising", -11.969335556030273 ], [ "devoted", -11.969443321228027 ], [ "▁Born", -11.969520568847656 ], [ "▁viitor", -11.969589233398438 ], [ "▁ritual", -11.969614028930664 ], 
[ "▁Guard", -11.969681739807129 ], [ "09.", -11.969828605651855 ], [ "▁Py", -11.970260620117188 ], [ "▁finds", -11.970380783081055 ], [ "▁boli", -11.970394134521484 ], [ "▁Mitglieder", -11.970697402954102 ], [ "ogni", -11.97107982635498 ], [ "▁stones", -11.97118854522705 ], [ "rox", -11.971210479736328 ], [ "▁dock", -11.971390724182129 ], [ "▁onion", -11.97144889831543 ], [ "▁classified", -11.971538543701172 ], [ "big", -11.971833229064941 ], [ "RG", -11.971857070922852 ], [ "influenced", -11.971955299377441 ], [ "▁sudden", -11.971988677978516 ], [ "▁ample", -11.97204303741455 ], [ "án", -11.972095489501953 ], [ "▁ornament", -11.972122192382812 ], [ "datele", -11.972227096557617 ], [ "▁Dad", -11.97225284576416 ], [ "BER", -11.972278594970703 ], [ "gerecht", -11.972380638122559 ], [ "kett", -11.972536087036133 ], [ "▁Antonio", -11.972572326660156 ], [ "Nu", -11.972834587097168 ], [ "dium", -11.97284984588623 ], [ "CAD", -11.972850799560547 ], [ "▁bundle", -11.972916603088379 ], [ "▁Vari", -11.97301197052002 ], [ "▁thrive", -11.973020553588867 ], [ "▁Seminar", -11.973071098327637 ], [ "wire", -11.973084449768066 ], [ "▁contributing", -11.973114967346191 ], [ "▁Bour", -11.97320556640625 ], [ "▁dori", -11.973206520080566 ], [ "▁packing", -11.97343921661377 ], [ "▁colleges", -11.973459243774414 ], [ "▁garbage", -11.97366714477539 ], [ "▁vector", -11.973837852478027 ], [ "▁suggestion", -11.973897933959961 ], [ "borne", -11.973904609680176 ], [ "▁Listen", -11.973938941955566 ], [ "▁Prix", -11.973957061767578 ], [ "viennent", -11.974162101745605 ], [ "insbesondere", -11.97426700592041 ], [ "▁fonctionne", -11.974435806274414 ], [ "▁mainstream", -11.974485397338867 ], [ "▁merci", -11.974574089050293 ], [ "oko", -11.97460651397705 ], [ "▁Commerce", -11.97493839263916 ], [ "▁droits", -11.975115776062012 ], [ "▁muzica", -11.975141525268555 ], [ "▁profesor", -11.9751558303833 ], [ "▁epic", -11.97518253326416 ], [ "▁intuitive", -11.975186347961426 ], [ "▁aggregate", -11.975223541259766 ], [ "▁vaccine", -11.97529411315918 ], [ "▁dank", -11.975459098815918 ], [ "▁situ", -11.975578308105469 ], [ "▁Cand", -11.975593566894531 ], [ "▁Ganz", -11.97562313079834 ], [ "▁Crystal", -11.97578239440918 ], [ "▁discretion", -11.975825309753418 ], [ "mug", -11.975997924804688 ], [ "▁anzu", -11.976144790649414 ], [ "▁cement", -11.97616958618164 ], [ "▁priest", -11.97625732421875 ], [ "▁rejected", -11.976298332214355 ], [ "▁Summit", -11.976325988769531 ], [ "▁Sara", -11.976424217224121 ], [ "▁palette", -11.976527214050293 ], [ "▁continuare", -11.976569175720215 ], [ "uge", -11.976676940917969 ], [ "ryl", -11.976844787597656 ], [ "▁Solid", -11.977142333984375 ], [ "▁meilleure", -11.977177619934082 ], [ "▁Tennessee", -11.977248191833496 ], [ "rail", -11.977326393127441 ], [ "▁attributes", -11.9773530960083 ], [ "▁vessels", -11.977840423583984 ], [ "cylinder", -11.977900505065918 ], [ "▁parfait", -11.977916717529297 ], [ "abb", -11.97801399230957 ], [ "▁Julie", -11.97806167602539 ], [ "▁pièces", -11.978120803833008 ], [ "▁proiecte", -11.978142738342285 ], [ "médi", -11.978273391723633 ], [ "▁décembre", -11.9783935546875 ], [ "Per", -11.97841739654541 ], [ "1/", -11.978520393371582 ], [ "regulated", -11.978601455688477 ], [ "▁Dy", -11.978633880615234 ], [ "▁23,", -11.978694915771484 ], [ "beck", -11.978763580322266 ], [ "tură", -11.97885513305664 ], [ "▁Chiar", -11.978931427001953 ], [ "▁isolated", -11.979012489318848 ], [ "▁kennen", -11.979259490966797 ], [ "Du", -11.979260444641113 ], [ "reflected", -11.979482650756836 ], [ 
"▁belong", -11.979571342468262 ], [ "▁welcomed", -11.97969913482666 ], [ "▁Rate", -11.979776382446289 ], [ "prestigious", -11.979859352111816 ], [ "▁1/4", -11.979930877685547 ], [ "▁distinction", -11.979966163635254 ], [ "▁boring", -11.980001449584961 ], [ "▁booked", -11.980369567871094 ], [ "▁citizen", -11.980441093444824 ], [ "▁comprises", -11.980498313903809 ], [ "▁aufge", -11.98051929473877 ], [ "GL", -11.980566024780273 ], [ "▁nearest", -11.980616569519043 ], [ "▁printr", -11.980692863464355 ], [ "▁département", -11.981318473815918 ], [ "▁planner", -11.981510162353516 ], [ "▁Rai", -11.981817245483398 ], [ "▁Broad", -11.981934547424316 ], [ "▁pastor", -11.981947898864746 ], [ "▁reservation", -11.982243537902832 ], [ "▁decembrie", -11.982315063476562 ], [ "▁suficient", -11.982501983642578 ], [ "geld", -11.982560157775879 ], [ "training", -11.982620239257812 ], [ "deshalb", -11.982634544372559 ], [ "▁chaud", -11.982651710510254 ], [ "Cor", -11.982662200927734 ], [ "▁Grade", -11.982769966125488 ], [ "▁faţă", -11.982809066772461 ], [ "story", -11.982839584350586 ], [ "gericht", -11.98286247253418 ], [ "▁Got", -11.982954025268555 ], [ "particulièrement", -11.982976913452148 ], [ "▁bump", -11.983051300048828 ], [ "▁fatigue", -11.983160018920898 ], [ "Activ", -11.983250617980957 ], [ "▁numéro", -11.983302116394043 ], [ "▁stranger", -11.983312606811523 ], [ "▁Skin", -11.983327865600586 ], [ "add", -11.98344898223877 ], [ "Ainsi", -11.98357105255127 ], [ "▁assists", -11.983684539794922 ], [ "▁zusätzlich", -11.983943939208984 ], [ "▁vede", -11.983979225158691 ], [ "RON", -11.984108924865723 ], [ "▁seemingly", -11.984126091003418 ], [ "▁NU", -11.98417854309082 ], [ "geb", -11.984273910522461 ], [ "▁Release", -11.984353065490723 ], [ "▁throwing", -11.984427452087402 ], [ "▁Alabama", -11.984447479248047 ], [ "▁Something", -11.984590530395508 ], [ "▁Cuba", -11.98464584350586 ], [ "▁Verbindung", -11.984649658203125 ], [ "▁Cir", -11.984654426574707 ], [ "your", -11.984713554382324 ], [ "-13", -11.984748840332031 ], [ "▁Delta", -11.984801292419434 ], [ "▁Twin", -11.98504638671875 ], [ "▁governance", -11.985156059265137 ], [ "▁groom", -11.985310554504395 ], [ "▁conception", -11.98533821105957 ], [ "▁governor", -11.985383033752441 ], [ "▁Spar", -11.985416412353516 ], [ "▁coastal", -11.985652923583984 ], [ "▁Seven", -11.985856056213379 ], [ "▁inclusive", -11.986002922058105 ], [ "cili", -11.986035346984863 ], [ "▁Ridge", -11.986100196838379 ], [ "teller", -11.986224174499512 ], [ "▁Kin", -11.986247062683105 ], [ "leiter", -11.986279487609863 ], [ "stern", -11.986364364624023 ], [ "change", -11.986404418945312 ], [ "▁presidential", -11.986433982849121 ], [ "▁composer", -11.986544609069824 ], [ "Stu", -11.986560821533203 ], [ "▁Frankfurt", -11.986584663391113 ], [ "prä", -11.986639976501465 ], [ "▁Ideal", -11.986644744873047 ], [ "▁linear", -11.986857414245605 ], [ "▁bloom", -11.986879348754883 ], [ "▁grades", -11.986881256103516 ], [ "mettant", -11.98692512512207 ], [ "▁finishes", -11.986952781677246 ], [ "holz", -11.987086296081543 ], [ "▁dirty", -11.987317085266113 ], [ "▁Roh", -11.987386703491211 ], [ "▁Praxis", -11.987408638000488 ], [ "tempo", -11.987433433532715 ], [ "▁attempted", -11.987433433532715 ], [ "▁primar", -11.987434387207031 ], [ "▁pomp", -11.987528800964355 ], [ "▁tolle", -11.987614631652832 ], [ "▁adres", -11.988011360168457 ], [ "▁Between", -11.988066673278809 ], [ "▁ruin", -11.988432884216309 ], [ "▁matériel", -11.988561630249023 ], [ "MER", -11.988913536071777 ], [ "Nevertheless", 
-11.989055633544922 ], [ "▁corruption", -11.989119529724121 ], [ "spire", -11.989180564880371 ], [ "▁mou", -11.989208221435547 ], [ "ROM", -11.989278793334961 ], [ "▁underground", -11.98935604095459 ], [ "▁relativ", -11.989389419555664 ], [ "waited", -11.989462852478027 ], [ "▁speeds", -11.989468574523926 ], [ "▁adjusted", -11.989486694335938 ], [ "▁Flat", -11.989514350891113 ], [ "UND", -11.98965835571289 ], [ "▁individuelle", -11.989744186401367 ], [ "▁anybody", -11.98978042602539 ], [ "EO", -11.989790916442871 ], [ "->", -11.989791870117188 ], [ "▁Spend", -11.989876747131348 ], [ "aktion", -11.990011215209961 ], [ "édit", -11.99006462097168 ], [ "▁quest", -11.990078926086426 ], [ "rind", -11.990541458129883 ], [ "▁mediu", -11.99057388305664 ], [ "▁barriers", -11.99062442779541 ], [ "▁répondre", -11.990633010864258 ], [ "▁novembre", -11.990708351135254 ], [ "▁champ", -11.990736961364746 ], [ "saw", -11.990757942199707 ], [ "▁fed", -11.990804672241211 ], [ "▁favorites", -11.990939140319824 ], [ "▁shield", -11.991055488586426 ], [ "▁Wide", -11.991146087646484 ], [ "▁problema", -11.991445541381836 ], [ "▁Asta", -11.991525650024414 ], [ "▁refreshing", -11.99168872833252 ], [ "hey", -11.991692543029785 ], [ "obtaining", -11.991788864135742 ], [ "▁parler", -11.992072105407715 ], [ "▁Cele", -11.992134094238281 ], [ "frage", -11.992136001586914 ], [ "écran", -11.992324829101562 ], [ "▁cleared", -11.992448806762695 ], [ "zehn", -11.992594718933105 ], [ "parmi", -11.992647171020508 ], [ "änder", -11.992691993713379 ], [ "▁Defense", -11.992693901062012 ], [ "tatea", -11.992696762084961 ], [ "▁reasonably", -11.992939949035645 ], [ "▁Idee", -11.992985725402832 ], [ "nehm", -11.993000030517578 ], [ "technologie", -11.993020057678223 ], [ "atura", -11.993048667907715 ], [ "▁slope", -11.993332862854004 ], [ "Hence", -11.993351936340332 ], [ "▁40%", -11.993391990661621 ], [ "▁jewe", -11.993448257446289 ], [ "▁queries", -11.993470191955566 ], [ "▁$8", -11.994096755981445 ], [ "▁Parker", -11.994107246398926 ], [ "▁publique", -11.994488716125488 ], [ "quant", -11.994529724121094 ], [ "issue", -11.994690895080566 ], [ "▁Cleveland", -11.994847297668457 ], [ "4,000", -11.995071411132812 ], [ "IDE", -11.995145797729492 ], [ "▁Barbara", -11.995233535766602 ], [ "udge", -11.995477676391602 ], [ "corn", -11.99554443359375 ], [ "veți", -11.995588302612305 ], [ "▁proteins", -11.995707511901855 ], [ "▁trăi", -11.995793342590332 ], [ "▁mijloc", -11.995842933654785 ], [ "logie", -11.995884895324707 ], [ "▁Walter", -11.995884895324707 ], [ "heißt", -11.99593448638916 ], [ "search", -11.995946884155273 ], [ "▁hochwertige", -11.996010780334473 ], [ "▁încerc", -11.996014595031738 ], [ "▁administrator", -11.99608039855957 ], [ "tension", -11.996133804321289 ], [ "▁homemade", -11.996438026428223 ], [ "▁$20", -11.99651050567627 ], [ "▁leben", -11.996662139892578 ], [ "netz", -11.996665954589844 ], [ "▁intensity", -11.996882438659668 ], [ "▁clever", -11.996891975402832 ], [ "▁installer", -11.996999740600586 ], [ "▁Wand", -11.997087478637695 ], [ "meister", -11.997130393981934 ], [ "ziel", -11.99744701385498 ], [ "▁architect", -11.99748706817627 ], [ "▁crede", -11.997512817382812 ], [ "▁Sleep", -11.997675895690918 ], [ "▁demonstr", -11.997745513916016 ], [ "cake", -11.997781753540039 ], [ "▁Cheap", -11.997783660888672 ], [ "pool", -11.9979829788208 ], [ "▁gadget", -11.998004913330078 ], [ "▁Anbieter", -11.998005867004395 ], [ "▁Jonathan", -11.998170852661133 ], [ "ül", -11.998492240905762 ], [ "▁Harvard", -11.998503684997559 ], 
[ "▁1985", -11.998773574829102 ], [ "HP", -11.998839378356934 ], [ "▁afara", -11.99893569946289 ], [ "▁halten", -11.999008178710938 ], [ "▁Technik", -11.999042510986328 ], [ "▁dressed", -11.999149322509766 ], [ "weis", -11.999165534973145 ], [ "▁donated", -11.9993314743042 ], [ "also", -11.99938678741455 ], [ "▁EN", -11.999405860900879 ], [ "▁imprim", -11.99942398071289 ], [ "▁onions", -11.999458312988281 ], [ "Par", -11.99950122833252 ], [ "▁donate", -11.99958324432373 ], [ "▁mice", -11.999610900878906 ], [ "referring", -11.999897956848145 ], [ "▁restored", -12.00003433227539 ], [ "▁amateur", -12.0000581741333 ], [ "▁Switch", -12.000075340270996 ], [ "appel", -12.00013542175293 ], [ "▁idéal", -12.0001859664917 ], [ "▁wheat", -12.000199317932129 ], [ "▁lime", -12.000240325927734 ], [ "REA", -12.00027084350586 ], [ "riti", -12.000357627868652 ], [ "ţiile", -12.00058364868164 ], [ "▁machinery", -12.00064754486084 ], [ "UNE", -12.00089168548584 ], [ "▁Cont", -12.000971794128418 ], [ "▁attendees", -12.001014709472656 ], [ "▁aparat", -12.001080513000488 ], [ "freundlich", -12.00117301940918 ], [ "▁zilnic", -12.001175880432129 ], [ "▁spark", -12.001421928405762 ], [ "▁Gast", -12.001459121704102 ], [ "▁Issue", -12.00147533416748 ], [ "▁scam", -12.001566886901855 ], [ "▁bonds", -12.001618385314941 ], [ "owner", -12.001641273498535 ], [ "▁empfehlen", -12.001673698425293 ], [ "elia", -12.001749992370605 ], [ "cic", -12.001757621765137 ], [ "▁honored", -12.001800537109375 ], [ "▁castle", -12.001846313476562 ], [ "avand", -12.002058982849121 ], [ "rough", -12.002108573913574 ], [ "▁Address", -12.002116203308105 ], [ "angle", -12.00217342376709 ], [ "leton", -12.002259254455566 ], [ "▁locked", -12.002392768859863 ], [ "▁consolid", -12.00248908996582 ], [ "▁voucher", -12.003011703491211 ], [ "ației", -12.003201484680176 ], [ "wachsen", -12.003211975097656 ], [ "▁magazines", -12.003287315368652 ], [ "▁Schools", -12.003318786621094 ], [ "▁voices", -12.003362655639648 ], [ "▁Dry", -12.003479957580566 ], [ "▁tricks", -12.00349235534668 ], [ "schließlich", -12.003546714782715 ], [ "▁loyalty", -12.003687858581543 ], [ "risk", -12.003764152526855 ], [ "▁Vers", -12.003786087036133 ], [ "chester", -12.003802299499512 ], [ "▁decorated", -12.003830909729004 ], [ "▁copiilor", -12.003969192504883 ], [ "riz", -12.003994941711426 ], [ "03.", -12.004013061523438 ], [ "▁Hur", -12.004016876220703 ], [ "▁archive", -12.004021644592285 ], [ "▁Continue", -12.004042625427246 ], [ "▁Nähe", -12.004043579101562 ], [ "jit", -12.004090309143066 ], [ "gekommen", -12.004301071166992 ], [ "▁conjunction", -12.004349708557129 ], [ "combining", -12.004404067993164 ], [ "▁Unterstützung", -12.004517555236816 ], [ "oza", -12.004593849182129 ], [ "▁sketch", -12.004720687866211 ], [ "▁arată", -12.004731178283691 ], [ "▁Mining", -12.004765510559082 ], [ "uous", -12.004791259765625 ], [ "▁devis", -12.004834175109863 ], [ "Almost", -12.004862785339355 ], [ "Hu", -12.005037307739258 ], [ "▁Om", -12.005366325378418 ], [ "MF", -12.00544548034668 ], [ "liz", -12.005451202392578 ], [ "▁fails", -12.005456924438477 ], [ "▁comparable", -12.005459785461426 ], [ "▁vein", -12.005547523498535 ], [ "▁Vis", -12.00561809539795 ], [ "▁viagra", -12.005654335021973 ], [ "▁farming", -12.005678176879883 ], [ "▁Late", -12.005765914916992 ], [ "geschrieben", -12.006033897399902 ], [ "hrew", -12.006103515625 ], [ "▁melt", -12.006120681762695 ], [ "lager", -12.006168365478516 ], [ "halte", -12.006240844726562 ], [ "▁Hotels", -12.006266593933105 ], [ "▁facebook", 
-12.0064058303833 ], [ "▁défi", -12.006550788879395 ], [ "shore", -12.006802558898926 ], [ "▁membrane", -12.006866455078125 ], [ "▁sixth", -12.006903648376465 ], [ "api", -12.007003784179688 ], [ "▁Owner", -12.007222175598145 ], [ "▁(\"", -12.007234573364258 ], [ "▁$50", -12.007280349731445 ], [ "▁protective", -12.007420539855957 ], [ "/2", -12.007548332214355 ], [ "▁Girls", -12.007562637329102 ], [ "Gri", -12.00769329071045 ], [ "▁nouă", -12.007708549499512 ], [ "▁infections", -12.007813453674316 ], [ "rân", -12.007868766784668 ], [ "▁Geb", -12.0078763961792 ], [ "▁Conseil", -12.007905006408691 ], [ "▁imagini", -12.007909774780273 ], [ "▁promotions", -12.00794792175293 ], [ "▁enforce", -12.00795841217041 ], [ "▁applicant", -12.007965087890625 ], [ "▁Apart", -12.008087158203125 ], [ "▁progression", -12.008151054382324 ], [ "▁careers", -12.008511543273926 ], [ "▁litigation", -12.008533477783203 ], [ "▁Menge", -12.00866413116455 ], [ "▁Contract", -12.00871753692627 ], [ "▁Kel", -12.0087308883667 ], [ "▁réserve", -12.008769035339355 ], [ "▁Cold", -12.008870124816895 ], [ "▁larg", -12.009040832519531 ], [ "▁microwave", -12.009090423583984 ], [ "▁Whit", -12.009212493896484 ], [ "▁Technologies", -12.009381294250488 ], [ "OU", -12.00949478149414 ], [ "itudine", -12.00959587097168 ], [ "▁handles", -12.009895324707031 ], [ "▁proceedings", -12.009982109069824 ], [ "▁prizes", -12.010043144226074 ], [ "▁unterstützen", -12.010062217712402 ], [ "▁piele", -12.010090827941895 ], [ "▁profound", -12.010153770446777 ], [ "schließen", -12.0101957321167 ], [ "▁trafic", -12.01025104522705 ], [ "▁Nar", -12.010441780090332 ], [ "▁Gesamt", -12.0106201171875 ], [ "▁bugs", -12.010720252990723 ], [ "▁Amy", -12.010764122009277 ], [ "▁eastern", -12.010775566101074 ], [ "nice", -12.010784149169922 ], [ "▁Besuch", -12.010835647583008 ], [ "▁synth", -12.010892868041992 ], [ "▁clasa", -12.011194229125977 ], [ "Book", -12.01134204864502 ], [ "▁ribbon", -12.011415481567383 ], [ "▁neues", -12.011431694030762 ], [ "ZE", -12.011504173278809 ], [ "▁peers", -12.011613845825195 ], [ "leistung", -12.011730194091797 ], [ "▁internship", -12.011808395385742 ], [ "count", -12.011850357055664 ], [ "nam", -12.01193618774414 ], [ "▁12-", -12.012072563171387 ], [ "acked", -12.012146949768066 ], [ "gonna", -12.012146949768066 ], [ "▁Dinge", -12.01215648651123 ], [ "Time", -12.012299537658691 ], [ "▁twelve", -12.01242446899414 ], [ "eye", -12.012432098388672 ], [ "▁avantaj", -12.01253604888916 ], [ "▁Glas", -12.012731552124023 ], [ "aucune", -12.0127534866333 ], [ "▁boil", -12.012763977050781 ], [ "▁Gray", -12.012773513793945 ], [ "adapt", -12.01288890838623 ], [ "occ", -12.012895584106445 ], [ "▁prieten", -12.012897491455078 ], [ "▁trai", -12.01296615600586 ], [ "▁Scal", -12.013009071350098 ], [ "▁conscious", -12.013057708740234 ], [ "▁charter", -12.013093948364258 ], [ "KS", -12.013242721557617 ], [ "▁Barr", -12.013404846191406 ], [ "▁summit", -12.013411521911621 ], [ "▁inflammation", -12.013439178466797 ], [ "tungs", -12.013440132141113 ], [ "ovic", -12.013449668884277 ], [ "▁conduit", -12.013465881347656 ], [ "▁Alice", -12.013702392578125 ], [ "▁veterans", -12.013850212097168 ], [ "Während", -12.013944625854492 ], [ "▁maximal", -12.014013290405273 ], [ "▁Hawaii", -12.014037132263184 ], [ "▁Pine", -12.01432991027832 ], [ "acelasi", -12.014391899108887 ], [ "hyp", -12.014424324035645 ], [ "sensitivity", -12.01445198059082 ], [ "pour", -12.014481544494629 ], [ "ре", -12.014493942260742 ], [ "▁Kentucky", -12.015129089355469 ], [ "▁badge", 
-12.015276908874512 ], [ "affecting", -12.015310287475586 ], [ "▁chairman", -12.015311241149902 ], [ "▁München", -12.015467643737793 ], [ "▁Hersteller", -12.015469551086426 ], [ "▁urmat", -12.015615463256836 ], [ "tels", -12.015654563903809 ], [ "▁FM", -12.015701293945312 ], [ "▁Basis", -12.015732765197754 ], [ "▁erklärt", -12.015809059143066 ], [ "▁changer", -12.015859603881836 ], [ "tischen", -12.0159330368042 ], [ "▁brave", -12.015960693359375 ], [ "▁siguranta", -12.015986442565918 ], [ "▁partnerships", -12.015989303588867 ], [ "ților", -12.015999794006348 ], [ "▁breathe", -12.016141891479492 ], [ "rink", -12.016551971435547 ], [ "▁footage", -12.016654014587402 ], [ "▁transformed", -12.016658782958984 ], [ "▁prep", -12.016866683959961 ], [ "▁upset", -12.016901969909668 ], [ "▁Native", -12.017059326171875 ], [ "▁Prima", -12.017154693603516 ], [ "▁jersey", -12.017163276672363 ], [ "230", -12.017182350158691 ], [ "▁lucrurile", -12.017393112182617 ], [ "▁divine", -12.017502784729004 ], [ "▁Pit", -12.017593383789062 ], [ "RIS", -12.01765251159668 ], [ "▁Cultural", -12.017672538757324 ], [ "▁exotic", -12.017786979675293 ], [ "▁tastes", -12.017881393432617 ], [ "▁bargain", -12.017913818359375 ], [ "▁optimize", -12.017985343933105 ], [ "▁électrique", -12.018012046813965 ], [ "deuxième", -12.018030166625977 ], [ "▁Gary", -12.018085479736328 ], [ "▁projection", -12.018122673034668 ], [ "▁sliding", -12.018195152282715 ], [ "club", -12.018216133117676 ], [ "association", -12.01823902130127 ], [ "▁LG", -12.018259048461914 ], [ "▁capsule", -12.018291473388672 ], [ "▁politicians", -12.018397331237793 ], [ "▁thumb", -12.018423080444336 ], [ "▁globally", -12.018743515014648 ], [ "positioned", -12.018796920776367 ], [ "▁Hamilton", -12.018861770629883 ], [ "arme", -12.018881797790527 ], [ "▁efectuat", -12.018881797790527 ], [ "zip", -12.019111633300781 ], [ "▁welfare", -12.019201278686523 ], [ "Leistung", -12.019230842590332 ], [ "▁Bac", -12.019316673278809 ], [ "▁fizic", -12.019338607788086 ], [ "OK", -12.019454002380371 ], [ "▁limba", -12.019545555114746 ], [ "▁wardrobe", -12.019549369812012 ], [ "▁offline", -12.019627571105957 ], [ "▁fortune", -12.019665718078613 ], [ "▁dialog", -12.019681930541992 ], [ "▁dramatically", -12.01997184753418 ], [ "▁NYC", -12.020045280456543 ], [ "▁Rem", -12.02017593383789 ], [ "▁bronze", -12.020455360412598 ], [ "▁pulse", -12.02053451538086 ], [ "Fortunately", -12.020562171936035 ], [ "▁glue", -12.020596504211426 ], [ "▁Expo", -12.020720481872559 ], [ "▁profitable", -12.020776748657227 ], [ "▁distributor", -12.020845413208008 ], [ "abilité", -12.020869255065918 ], [ "▁lyrics", -12.020913124084473 ], [ "▁mesh", -12.02114486694336 ], [ "▁organizational", -12.021157264709473 ], [ "▁vanilla", -12.021249771118164 ], [ "▁foc", -12.021355628967285 ], [ "▁1984", -12.02147388458252 ], [ "▁créé", -12.02172565460205 ], [ "▁servi", -12.022027969360352 ], [ "▁underneath", -12.022095680236816 ], [ "▁surveys", -12.022143363952637 ], [ "▁genes", -12.022238731384277 ], [ "▁limite", -12.02224349975586 ], [ "oder", -12.022247314453125 ], [ "▁mandatory", -12.022269248962402 ], [ "▁hospitality", -12.022303581237793 ], [ "▁bikes", -12.022309303283691 ], [ "▁Quote", -12.022358894348145 ], [ "glu", -12.02241039276123 ], [ "▁activitatea", -12.022513389587402 ], [ "preventing", -12.022584915161133 ], [ "▁Kh", -12.02259635925293 ], [ "économie", -12.022616386413574 ], [ "▁visite", -12.022757530212402 ], [ "▁spectacle", -12.022778511047363 ], [ "▁tract", -12.022860527038574 ], [ "▁quant", 
-12.022862434387207 ], [ "▁evolu", -12.022866249084473 ], [ "▁invata", -12.023070335388184 ], [ "▁homo", -12.02311897277832 ], [ "▁Users", -12.02344799041748 ], [ "introducing", -12.023632049560547 ], [ "hibi", -12.023661613464355 ], [ "▁Instrument", -12.023805618286133 ], [ "▁ép", -12.023839950561523 ], [ "▁Raj", -12.023869514465332 ], [ "▁executives", -12.023881912231445 ], [ "atoire", -12.023885726928711 ], [ "▁erforderlich", -12.02397346496582 ], [ "male", -12.024211883544922 ], [ "umble", -12.024271011352539 ], [ "erson", -12.024277687072754 ], [ "▁Treatment", -12.024286270141602 ], [ "▁Representative", -12.024314880371094 ], [ "▁corners", -12.024409294128418 ], [ "▁Petit", -12.024599075317383 ], [ "8)", -12.02464771270752 ], [ "▁Walker", -12.024714469909668 ], [ "▁Stir", -12.02476692199707 ], [ "/19", -12.024767875671387 ], [ "▁Stelle", -12.024979591369629 ], [ "ără", -12.025009155273438 ], [ "osse", -12.025166511535645 ], [ "2000", -12.025189399719238 ], [ "▁McG", -12.025580406188965 ], [ "DV", -12.025773048400879 ], [ "▁Firm", -12.025862693786621 ], [ "▁packet", -12.025904655456543 ], [ "Toate", -12.02640438079834 ], [ "▁institutional", -12.026479721069336 ], [ "rug", -12.026663780212402 ], [ "DG", -12.026837348937988 ], [ "fine", -12.026837348937988 ], [ "bringen", -12.026856422424316 ], [ "▁Horse", -12.026921272277832 ], [ "▁premiere", -12.026937484741211 ], [ "▁Că", -12.027026176452637 ], [ "acheter", -12.02703857421875 ], [ "▁Afghanistan", -12.027053833007812 ], [ "▁Prop", -12.027085304260254 ], [ "ühr", -12.02715015411377 ], [ "▁braucht", -12.027398109436035 ], [ "▁sunny", -12.027424812316895 ], [ "▁Sach", -12.027461051940918 ], [ "▁volumes", -12.02753734588623 ], [ "tinut", -12.02759838104248 ], [ "▁Sho", -12.027722358703613 ], [ "▁winds", -12.027735710144043 ], [ "▁Mall", -12.027873992919922 ], [ "ledge", -12.027937889099121 ], [ "▁sciences", -12.027997016906738 ], [ "plication", -12.028024673461914 ], [ "VR", -12.028068542480469 ], [ "destin", -12.028234481811523 ], [ "▁früh", -12.02833366394043 ], [ "▁tongue", -12.028359413146973 ], [ "▁Jennifer", -12.028425216674805 ], [ "▁bracket", -12.028427124023438 ], [ "▁episodes", -12.02845287322998 ], [ "breite", -12.028461456298828 ], [ "▁stoc", -12.028635025024414 ], [ "ilia", -12.028728485107422 ], [ "▁Gulf", -12.02874755859375 ], [ "▁transparency", -12.028768539428711 ], [ "Industrie", -12.028853416442871 ], [ "▁viewers", -12.028916358947754 ], [ "AIN", -12.029129981994629 ], [ "▁Registration", -12.029149055480957 ], [ "/4", -12.029309272766113 ], [ "▁fera", -12.029337882995605 ], [ "▁06", -12.029351234436035 ], [ "▁einzu", -12.029391288757324 ], [ "enburg", -12.02944278717041 ], [ "▁eff", -12.029449462890625 ], [ "▁Stage", -12.029558181762695 ], [ "▁Cour", -12.029685020446777 ], [ "indu", -12.029836654663086 ], [ "▁Tools", -12.029909133911133 ], [ "IST", -12.029921531677246 ], [ "grund", -12.030105590820312 ], [ "seitig", -12.030153274536133 ], [ "pai", -12.030250549316406 ], [ "▁waist", -12.030350685119629 ], [ "▁Therapy", -12.03049373626709 ], [ "▁nomination", -12.030599594116211 ], [ "▁seama", -12.030790328979492 ], [ "▁analyse", -12.030975341796875 ], [ "▁emerge", -12.031044006347656 ], [ "▁adjustment", -12.031106948852539 ], [ "▁stroll", -12.031106948852539 ], [ "▁Beyond", -12.031174659729004 ], [ "▁legally", -12.03122615814209 ], [ "▁gauge", -12.03123664855957 ], [ "▁26,", -12.031360626220703 ], [ "Tex", -12.031390190124512 ], [ "economic", -12.031488418579102 ], [ "stoffe", -12.031532287597656 ], [ "Wir", 
-12.031559944152832 ], [ "ffen", -12.031601905822754 ], [ "▁acoperi", -12.031609535217285 ], [ "▁finale", -12.031792640686035 ], [ "▁theoretical", -12.031864166259766 ], [ "1.3", -12.031875610351562 ], [ "anim", -12.031888008117676 ], [ "▁separation", -12.031928062438965 ], [ "agence", -12.031937599182129 ], [ "▁réalisé", -12.032069206237793 ], [ "sprech", -12.03215503692627 ], [ "▁embedded", -12.032208442687988 ], [ "▁defence", -12.032242774963379 ], [ "éni", -12.032569885253906 ], [ "▁Norman", -12.032613754272461 ], [ "▁insgesamt", -12.032621383666992 ], [ "▁reminde", -12.032631874084473 ], [ "▁timeline", -12.032703399658203 ], [ "▁symbols", -12.032770156860352 ], [ "▁booth", -12.032783508300781 ], [ "▁Window", -12.032788276672363 ], [ "▁Titan", -12.032910346984863 ], [ "înt", -12.033021926879883 ], [ "▁langa", -12.033021926879883 ], [ "isant", -12.03303337097168 ], [ "hart", -12.033113479614258 ], [ "broader", -12.033266067504883 ], [ "▁stays", -12.033288955688477 ], [ "dur", -12.033488273620605 ], [ "▁Actually", -12.033514022827148 ], [ "works", -12.03351879119873 ], [ "▁réussi", -12.03357219696045 ], [ "▁performant", -12.033658981323242 ], [ "▁banana", -12.033788681030273 ], [ "▁baked", -12.033870697021484 ], [ "▁Parlament", -12.033931732177734 ], [ "▁Legend", -12.033967018127441 ], [ "toata", -12.034172058105469 ], [ "platte", -12.03419017791748 ], [ "▁Mou", -12.034192085266113 ], [ "HL", -12.034235000610352 ], [ "▁(8", -12.034290313720703 ], [ "▁accepting", -12.034313201904297 ], [ "▁Senator", -12.034340858459473 ], [ "▁consciousness", -12.034396171569824 ], [ "▁conducting", -12.0344820022583 ], [ "▁panic", -12.034833908081055 ], [ "▁FDA", -12.035112380981445 ], [ "▁(7", -12.035163879394531 ], [ "tool", -12.035300254821777 ], [ "▁Shipping", -12.03538703918457 ], [ "▁hop", -12.035545349121094 ], [ "▁conferences", -12.03564167022705 ], [ "▁pork", -12.035661697387695 ], [ "▁spam", -12.035730361938477 ], [ "▁interesant", -12.035815238952637 ], [ "▁Tagen", -12.03581714630127 ], [ "sig", -12.035886764526367 ], [ "étro", -12.036044120788574 ], [ "▁legendary", -12.036449432373047 ], [ "▁Alternative", -12.036643981933594 ], [ "iana", -12.036704063415527 ], [ "▁responsable", -12.036888122558594 ], [ "▁Mihai", -12.037237167358398 ], [ "▁decreased", -12.037345886230469 ], [ "▁organised", -12.037485122680664 ], [ "▁Lamp", -12.037589073181152 ], [ "litz", -12.037622451782227 ], [ "ohn", -12.037622451782227 ], [ "▁moteur", -12.0376615524292 ], [ "III", -12.03768539428711 ], [ "▁Montag", -12.037755012512207 ], [ "▁naturel", -12.037814140319824 ], [ "▁Hus", -12.037842750549316 ], [ "▁Schl", -12.037884712219238 ], [ "ains", -12.037968635559082 ], [ "▁dying", -12.0380859375 ], [ "▁HIV", -12.038115501403809 ], [ "],", -12.038164138793945 ], [ "alität", -12.03818416595459 ], [ "▁institute", -12.038249015808105 ], [ "mix", -12.038433074951172 ], [ "▁Regulation", -12.038453102111816 ], [ "▁pagina", -12.03857707977295 ], [ "▁Awesome", -12.03860092163086 ], [ "▁Official", -12.03860092163086 ], [ "▁Minute", -12.038601875305176 ], [ "▁dairy", -12.038787841796875 ], [ "▁carti", -12.038881301879883 ], [ "isk", -12.039091110229492 ], [ "▁thrilled", -12.039138793945312 ], [ "▁german", -12.039172172546387 ], [ "▁frustration", -12.039228439331055 ], [ "▁forums", -12.03927230834961 ], [ "command", -12.039361000061035 ], [ "▁router", -12.039399147033691 ], [ "▁Lösung", -12.039423942565918 ], [ "white", -12.039470672607422 ], [ "▁synthetic", -12.039487838745117 ], [ "▁retrouver", -12.039554595947266 ], [ "alle", 
-12.039621353149414 ], [ "daran", -12.039653778076172 ], [ "▁wahr", -12.039697647094727 ], [ "▁paths", -12.039875984191895 ], [ "▁unver", -12.039962768554688 ], [ "▁Environment", -12.0400972366333 ], [ "▁médecin", -12.040510177612305 ], [ "crypt", -12.040572166442871 ], [ "▁pursuit", -12.040595054626465 ], [ "flat", -12.040611267089844 ], [ "bron", -12.040698051452637 ], [ "▁Specialist", -12.040852546691895 ], [ "▁Vent", -12.041157722473145 ], [ "Gen", -12.04132080078125 ], [ "▁attraction", -12.04132080078125 ], [ "▁piese", -12.041372299194336 ], [ "CHE", -12.041665077209473 ], [ "fähig", -12.04172420501709 ], [ "▁28,", -12.041773796081543 ], [ "defender", -12.041810989379883 ], [ "▁stupid", -12.04181957244873 ], [ "enfin", -12.04185962677002 ], [ "▁composite", -12.04207706451416 ], [ "fragen", -12.042202949523926 ], [ "Part", -12.042232513427734 ], [ "may", -12.042238235473633 ], [ "▁Bucureşti", -12.042248725891113 ], [ "▁février", -12.042248725891113 ], [ "RED", -12.042417526245117 ], [ "▁makers", -12.042462348937988 ], [ "▁guns", -12.042594909667969 ], [ "▁pasta", -12.042706489562988 ], [ "STR", -12.04271125793457 ], [ "▁worthy", -12.042760848999023 ], [ "Poate", -12.042783737182617 ], [ "▁101", -12.04286003112793 ], [ "▁souhaitez", -12.04299545288086 ], [ "GN", -12.043449401855469 ], [ "drive", -12.043499946594238 ], [ "▁aveti", -12.043582916259766 ], [ "▁eventual", -12.043591499328613 ], [ "▁américain", -12.043642044067383 ], [ "▁Mine", -12.043678283691406 ], [ "▁sunset", -12.043729782104492 ], [ "▁Choice", -12.043844223022461 ], [ "▁offset", -12.043944358825684 ], [ "APP", -12.04410457611084 ], [ "▁suchen", -12.044130325317383 ], [ "▁aduc", -12.044228553771973 ], [ "▁Unternehmens", -12.044342041015625 ], [ "▁ -12.044651985168457 ], [ "▁astept", -12.044678688049316 ], [ "▁Birthday", -12.045061111450195 ], [ "▁barn", -12.045083999633789 ], [ "apport", -12.045105934143066 ], [ "▁collar", -12.045212745666504 ], [ "▁gefunden", -12.045294761657715 ], [ "▁Hai", -12.045429229736328 ], [ "▁Soul", -12.045441627502441 ], [ "ismus", -12.045654296875 ], [ "letzt", -12.045754432678223 ], [ "▁maker", -12.045841217041016 ], [ "▁executed", -12.045857429504395 ], [ "▁Forschung", -12.045915603637695 ], [ "▁täglich", -12.045958518981934 ], [ "▁tailor", -12.045960426330566 ], [ "▁headquarters", -12.0460844039917 ], [ "▁physicians", -12.046112060546875 ], [ "▁Scout", -12.046126365661621 ], [ "folgen", -12.046175003051758 ], [ "▁cycling", -12.046184539794922 ], [ "mindestens", -12.04620361328125 ], [ "▁joli", -12.046216011047363 ], [ "▁classification", -12.046225547790527 ], [ "▁Führung", -12.046258926391602 ], [ "▁peau", -12.04629135131836 ], [ "INT", -12.046502113342285 ], [ "▁Garage", -12.046664237976074 ], [ "teile", -12.046714782714844 ], [ "util", -12.046716690063477 ], [ "▁petrec", -12.046751022338867 ], [ "▁Nevada", -12.046826362609863 ], [ "▁laisser", -12.04706859588623 ], [ "▁territoire", -12.047131538391113 ], [ "▁fichier", -12.047154426574707 ], [ "▁Formula", -12.047343254089355 ], [ "scopul", -12.047379493713379 ], [ "▁Tee", -12.047486305236816 ], [ "▁Monte", -12.047529220581055 ], [ "▁pumpkin", -12.04757022857666 ], [ "▁picnic", -12.047589302062988 ], [ "▁occupation", -12.047652244567871 ], [ "▁numérique", -12.047831535339355 ], [ "linie", -12.04786491394043 ], [ "▁masina", -12.048117637634277 ], [ "▁Prä", -12.048173904418945 ], [ "▁dezvoltare", -12.048177719116211 ], [ "▁vient", -12.048291206359863 ], [ "▁ranks", -12.048295021057129 ], [ "▁Bruce", -12.048420906066895 ], [ "▁seara", 
-12.048433303833008 ], [ "▁hungry", -12.048563003540039 ], [ "▁resolved", -12.048650741577148 ], [ "paired", -12.048735618591309 ], [ "▁Congratulations", -12.048881530761719 ], [ "▁religi", -12.048918724060059 ], [ "sätze", -12.04897689819336 ], [ "▁Eat", -12.049172401428223 ], [ "▁dense", -12.049442291259766 ], [ "▁slice", -12.049447059631348 ], [ "▁mulți", -12.049463272094727 ], [ "▁vorbe", -12.049517631530762 ], [ "▁terminate", -12.049779891967773 ], [ "worm", -12.049880981445312 ], [ "ignon", -12.0499267578125 ], [ "▁Howard", -12.049992561340332 ], [ "▁toddler", -12.050017356872559 ], [ "▁waters", -12.050033569335938 ], [ "▁graduates", -12.0501708984375 ], [ "▁fundraising", -12.050298690795898 ], [ "06.", -12.05031967163086 ], [ "▁scent", -12.050346374511719 ], [ "▁CPU", -12.050406455993652 ], [ "▁Kid", -12.05045223236084 ], [ "▁Years", -12.050460815429688 ], [ "▁Oktober", -12.05063533782959 ], [ "filled", -12.050726890563965 ], [ "▁Laser", -12.05079460144043 ], [ "▁tut", -12.051032066345215 ], [ "ively", -12.051101684570312 ], [ "▁WiFi", -12.051161766052246 ], [ "standen", -12.051176071166992 ], [ "▁publié", -12.051243782043457 ], [ "▁explaining", -12.051279067993164 ], [ "trieb", -12.051288604736328 ], [ "▁Rapid", -12.0513334274292 ], [ "▁unterstützt", -12.051352500915527 ], [ "▁Sonnen", -12.051401138305664 ], [ "▁lenses", -12.05141544342041 ], [ "▁pressing", -12.051477432250977 ], [ "▁respected", -12.051657676696777 ], [ "adapted", -12.051706314086914 ], [ "Don", -12.051726341247559 ], [ "▁mun", -12.051733016967773 ], [ "MAR", -12.05180835723877 ], [ "▁seam", -12.051852226257324 ], [ "chev", -12.052140235900879 ], [ "▁Sozial", -12.052424430847168 ], [ "▁Arabia", -12.052485466003418 ], [ "▁equation", -12.05257511138916 ], [ "▁elevi", -12.052780151367188 ], [ "▁piata", -12.052868843078613 ], [ "JA", -12.052873611450195 ], [ "▁wholesale", -12.052887916564941 ], [ "▁faithful", -12.05296516418457 ], [ "legal", -12.053092002868652 ], [ "▁Brexit", -12.053095817565918 ], [ "vention", -12.053120613098145 ], [ "▁adhere", -12.053221702575684 ], [ "▁Associate", -12.053257942199707 ], [ "▁decorations", -12.053272247314453 ], [ "▁crois", -12.053359985351562 ], [ "buck", -12.053370475769043 ], [ "▁smartphones", -12.053421020507812 ], [ "Regardless", -12.053427696228027 ], [ "center", -12.053434371948242 ], [ "eiß", -12.053481101989746 ], [ "▁emotion", -12.053584098815918 ], [ "▁Gespräch", -12.053797721862793 ], [ "▁Avi", -12.053963661193848 ], [ "▁loft", -12.054059982299805 ], [ "▁Wissen", -12.054391860961914 ], [ "▁orchestra", -12.05439567565918 ], [ "▁gehören", -12.054421424865723 ], [ "▁Reich", -12.054532051086426 ], [ "▁abandoned", -12.054548263549805 ], [ "▁Lanka", -12.054586410522461 ], [ "pala", -12.054832458496094 ], [ "▁Stell", -12.054838180541992 ], [ "logged", -12.054924964904785 ], [ "terie", -12.054935455322266 ], [ "▁educa", -12.054954528808594 ], [ "1).", -12.055097579956055 ], [ "▁disponibil", -12.055119514465332 ], [ "IND", -12.055197715759277 ], [ "▁Pont", -12.055288314819336 ], [ "▁téléphone", -12.055398941040039 ], [ "▁rope", -12.055595397949219 ], [ "ève", -12.055622100830078 ], [ "▁Trainer", -12.056062698364258 ], [ "▁présence", -12.0560941696167 ], [ "▁Oscar", -12.056121826171875 ], [ "▁VR", -12.056342124938965 ], [ "▁Besucher", -12.056357383728027 ], [ "▁disponibles", -12.056447982788086 ], [ "▁gelten", -12.056604385375977 ], [ "▁ports", -12.056645393371582 ], [ "Invest", -12.056693077087402 ], [ "ésormais", -12.056795120239258 ], [ "schauen", -12.056880950927734 ], [ 
"▁Command", -12.056958198547363 ], [ "▁alternate", -12.05709171295166 ], [ "citation", -12.05713939666748 ], [ "évolution", -12.05714225769043 ], [ "▁Maine", -12.057145118713379 ], [ "pflege", -12.057174682617188 ], [ "2011", -12.057343482971191 ], [ "▁Ground", -12.057364463806152 ], [ "▁ghost", -12.057418823242188 ], [ "lebt", -12.057530403137207 ], [ "▁scenarios", -12.057595252990723 ], [ "▁mall", -12.057634353637695 ], [ "▁Kings", -12.057653427124023 ], [ "▁15%", -12.057848930358887 ], [ "▁Paint", -12.057848930358887 ], [ "FD", -12.057849884033203 ], [ "ugg", -12.058011054992676 ], [ "▁Leon", -12.058023452758789 ], [ "▁grows", -12.058135032653809 ], [ "▁pharmacy", -12.058384895324707 ], [ "▁situat", -12.0584135055542 ], [ "20,000", -12.05855941772461 ], [ "▁10,000", -12.058760643005371 ], [ "▁membre", -12.058771133422852 ], [ "▁facilement", -12.058806419372559 ], [ "▁Analytics", -12.058915138244629 ], [ "▁Marvel", -12.058930397033691 ], [ "▁survived", -12.059097290039062 ], [ "▁conviction", -12.059124946594238 ], [ "▁Produktion", -12.059260368347168 ], [ "▁professionally", -12.059293746948242 ], [ "▁contributor", -12.059486389160156 ], [ "▁Kurs", -12.059503555297852 ], [ "▁humor", -12.059549331665039 ], [ "▁cinci", -12.059609413146973 ], [ "▁Different", -12.059670448303223 ], [ "▁Verarbeitung", -12.059800148010254 ], [ "▁inexpensive", -12.059800148010254 ], [ "▁sortie", -12.05980110168457 ], [ "▁thankful", -12.059951782226562 ], [ "▁vacances", -12.059978485107422 ], [ "▁vergangen", -12.059979438781738 ], [ "▁wings", -12.05998420715332 ], [ "▁nano", -12.06003475189209 ], [ "▁touches", -12.060088157653809 ], [ "▁Notice", -12.060348510742188 ], [ "▁reprezinta", -12.060466766357422 ], [ "▁rewarding", -12.060555458068848 ], [ "▁Kurz", -12.060580253601074 ], [ "▁mega", -12.060611724853516 ], [ "▁secrets", -12.060646057128906 ], [ "▁vorher", -12.060667037963867 ], [ "▁crescut", -12.06074333190918 ], [ "▁coordination", -12.060754776000977 ], [ "▁dissertation", -12.060863494873047 ], [ "▁header", -12.060873985290527 ], [ "existent", -12.061070442199707 ], [ "thal", -12.061185836791992 ], [ "▁translate", -12.061214447021484 ], [ "vertrag", -12.06124210357666 ], [ "GU", -12.06126594543457 ], [ "▁Arthur", -12.061315536499023 ], [ "wahl", -12.061534881591797 ], [ "▁octobre", -12.061573028564453 ], [ "▁bother", -12.06157398223877 ], [ "▁pencil", -12.061580657958984 ], [ "▁Dyna", -12.061604499816895 ], [ "▁complimentary", -12.061651229858398 ], [ "écoute", -12.061676979064941 ], [ "PB", -12.061722755432129 ], [ "▁independently", -12.061759948730469 ], [ "▁targeting", -12.061840057373047 ], [ "fought", -12.061944961547852 ], [ "mental", -12.062112808227539 ], [ "▁Veranstaltung", -12.062300682067871 ], [ "▁tatsächlich", -12.062314987182617 ], [ "▁Features", -12.0625 ], [ "▁1920", -12.062554359436035 ], [ "▁Domain", -12.062885284423828 ], [ "▁rally", -12.062901496887207 ], [ "▁iunie", -12.063036918640137 ], [ "▁fabrics", -12.063070297241211 ], [ "▁mint", -12.063331604003906 ], [ "▁antioxidant", -12.063347816467285 ], [ "hut", -12.063432693481445 ], [ "EPA", -12.063496589660645 ], [ "▁rigid", -12.063498497009277 ], [ "▁evit", -12.063549995422363 ], [ "▁personnage", -12.063977241516113 ], [ "▁garanti", -12.0640287399292 ], [ "▁Hä", -12.064042091369629 ], [ "▁Days", -12.064048767089844 ], [ "boarding", -12.064050674438477 ], [ "jemand", -12.064166069030762 ], [ "▁Pos", -12.064262390136719 ], [ "▁wool", -12.064288139343262 ], [ "▁boom", -12.064349174499512 ], [ "▁wichtige", -12.06447982788086 ], [ 
"▁emerged", -12.064517974853516 ], [ "▁smoothly", -12.064802169799805 ], [ "▁Interview", -12.064942359924316 ], [ "gemäß", -12.06505012512207 ], [ "▁suivi", -12.065064430236816 ], [ "▁missions", -12.065129280090332 ], [ "▁Kreis", -12.065328598022461 ], [ "century", -12.065348625183105 ], [ "▁tuned", -12.065370559692383 ], [ "isieren", -12.065407752990723 ], [ "▁Branch", -12.065427780151367 ], [ "▁Russell", -12.065483093261719 ], [ "▁**", -12.065519332885742 ], [ "▁Lehr", -12.065617561340332 ], [ "▁perspectives", -12.065690040588379 ], [ "▁handed", -12.06570816040039 ], [ "▁apporte", -12.065743446350098 ], [ "unta", -12.065959930419922 ], [ "▁contemplat", -12.066255569458008 ], [ "riel", -12.06633472442627 ], [ "▁freely", -12.066341400146484 ], [ "▁loyal", -12.066451072692871 ], [ "▁evolved", -12.066518783569336 ], [ "▁Cafe", -12.066548347473145 ], [ "▁assignments", -12.066598892211914 ], [ "▁Cream", -12.066718101501465 ], [ "▁Build", -12.066731452941895 ], [ "▁exams", -12.066746711730957 ], [ "▁graduation", -12.066765785217285 ], [ "▁Dining", -12.066773414611816 ], [ "inne", -12.06684398651123 ], [ "▁propriu", -12.067055702209473 ], [ "▁accordingly", -12.067241668701172 ], [ "▁seniors", -12.067484855651855 ], [ "▁sisters", -12.067505836486816 ], [ "formerly", -12.067658424377441 ], [ "▁fleur", -12.067702293395996 ], [ "▁alten", -12.067802429199219 ], [ "▁Gefühl", -12.06797981262207 ], [ "▁freeze", -12.068222045898438 ], [ "▁structured", -12.068312644958496 ], [ "▁reserved", -12.068367004394531 ], [ "stellt", -12.068638801574707 ], [ "▁foto", -12.068668365478516 ], [ "linger", -12.06871223449707 ], [ "▁profiter", -12.068737030029297 ], [ "▁trup", -12.068862915039062 ], [ "▁Hunter", -12.068974494934082 ], [ "▁widespread", -12.069050788879395 ], [ "entretien", -12.069242477416992 ], [ "▁Truck", -12.06958293914795 ], [ "Can", -12.069656372070312 ], [ "péri", -12.06976318359375 ], [ "▁>>", -12.069926261901855 ], [ "▁trains", -12.070141792297363 ], [ "▁faca", -12.070149421691895 ], [ "▁Patienten", -12.070170402526855 ], [ "▁scor", -12.070361137390137 ], [ "▁perceived", -12.070384979248047 ], [ "setzung", -12.070393562316895 ], [ "▁Robin", -12.070558547973633 ], [ "▁geboren", -12.07060718536377 ], [ "lons", -12.070687294006348 ], [ "inţa", -12.070836067199707 ], [ "glob", -12.070887565612793 ], [ "subsequently", -12.07111930847168 ], [ "▁vet", -12.071170806884766 ], [ "▁Holland", -12.071328163146973 ], [ "▁Clinical", -12.071370124816895 ], [ "▁uncertainty", -12.071381568908691 ], [ "hohen", -12.071386337280273 ], [ "uza", -12.071431159973145 ], [ "▁kleiner", -12.071518898010254 ], [ "▁substances", -12.07155704498291 ], [ "ados", -12.071627616882324 ], [ "wheel", -12.07178020477295 ], [ "▁cone", -12.071990966796875 ], [ "▁castig", -12.072218894958496 ], [ "▁Conditions", -12.072242736816406 ], [ "minus", -12.072643280029297 ], [ "▁permits", -12.07265853881836 ], [ "fond", -12.072784423828125 ], [ "▁reactions", -12.07278823852539 ], [ "▁Mario", -12.072819709777832 ], [ "▁materiale", -12.07291030883789 ], [ "AH", -12.072924613952637 ], [ "▁juillet", -12.073172569274902 ], [ "▁juridic", -12.073182106018066 ], [ "▁dropping", -12.073200225830078 ], [ "expérience", -12.073225021362305 ], [ "▁depot", -12.073345184326172 ], [ "▁plea", -12.073490142822266 ], [ "dezvoltarea", -12.073512077331543 ], [ "▁Independent", -12.07363224029541 ], [ "▁Homes", -12.073674201965332 ], [ "▁crust", -12.073808670043945 ], [ "▁pillow", -12.073899269104004 ], [ "kreis", -12.073920249938965 ], [ "▁boiler", -12.073928833007812 
], [ "latin", -12.073978424072266 ], [ "▁stet", -12.074131965637207 ], [ "GH", -12.074143409729004 ], [ "▁absent", -12.074334144592285 ], [ "▁Directors", -12.074501037597656 ], [ "zwischen", -12.07462215423584 ], [ "▁comprendre", -12.07465648651123 ], [ "▁25,", -12.074832916259766 ], [ "▁pharmaceutical", -12.075145721435547 ], [ "▁placeholder", -12.075174331665039 ], [ "KI", -12.075176239013672 ], [ "▁români", -12.07540225982666 ], [ "▁Dollar", -12.075509071350098 ], [ "▁Operations", -12.075525283813477 ], [ "▁Dublin", -12.075550079345703 ], [ "▁drawings", -12.0756196975708 ], [ "▁respir", -12.075769424438477 ], [ "▁haul", -12.0758056640625 ], [ "Obviously", -12.075864791870117 ], [ "▁Beat", -12.075864791870117 ], [ "▁jeans", -12.07590103149414 ], [ "▁Masters", -12.075927734375 ], [ "▁bits", -12.076213836669922 ], [ "poți", -12.076226234436035 ], [ "▁asigur", -12.076228141784668 ], [ "▁intampla", -12.076228141784668 ], [ "▁marc", -12.076282501220703 ], [ "......", -12.076404571533203 ], [ "▁districts", -12.076437950134277 ], [ "cru", -12.076457023620605 ], [ "nav", -12.076608657836914 ], [ "huile", -12.076644897460938 ], [ "▁limitation", -12.076647758483887 ], [ "boat", -12.076712608337402 ], [ "IRE", -12.076720237731934 ], [ "Unis", -12.07675838470459 ], [ "dated", -12.0769624710083 ], [ "▁consultants", -12.07699203491211 ], [ "▁Josh", -12.077007293701172 ], [ "tanz", -12.077184677124023 ], [ "launching", -12.0772066116333 ], [ "▁browsing", -12.077310562133789 ], [ "▁incerc", -12.077314376831055 ], [ "▁27,", -12.077375411987305 ], [ "не", -12.077398300170898 ], [ "wig", -12.077415466308594 ], [ "▁spar", -12.077458381652832 ], [ "▁token", -12.077547073364258 ], [ "▁09", -12.077548027038574 ], [ "spa", -12.07766056060791 ], [ "ometer", -12.07772159576416 ], [ "▁riders", -12.077869415283203 ], [ "▁Drop", -12.077898979187012 ], [ "RN", -12.078103065490723 ], [ "▁pairs", -12.07815933227539 ], [ "▁psychology", -12.078420639038086 ], [ "▁Douglas", -12.078437805175781 ], [ "▁verwenden", -12.078516960144043 ], [ "▁(9", -12.07857894897461 ], [ "▁Rental", -12.078728675842285 ], [ "▁délai", -12.078847885131836 ], [ "▁sooner", -12.078882217407227 ], [ "▁bankruptcy", -12.079109191894531 ], [ "04.", -12.079110145568848 ], [ "abend", -12.079194068908691 ], [ "çon", -12.079237937927246 ], [ "▁Ple", -12.079243659973145 ], [ "fug", -12.079337120056152 ], [ "▁Wohnung", -12.079410552978516 ], [ "▁Preise", -12.079424858093262 ], [ "▁Kay", -12.079427719116211 ], [ "▁notify", -12.079474449157715 ], [ "▁Brain", -12.079534530639648 ], [ "▁optical", -12.079580307006836 ], [ "▁modifications", -12.079727172851562 ], [ "▁repos", -12.07999324798584 ], [ "▁worksheet", -12.0800142288208 ], [ "continu", -12.08005428314209 ], [ "▁assumed", -12.08059024810791 ], [ "varying", -12.080626487731934 ], [ "feier", -12.080643653869629 ], [ "▁Freedom", -12.080717086791992 ], [ "▁Inhalte", -12.080740928649902 ], [ "▁observations", -12.080755233764648 ], [ "▁Gruppe", -12.080791473388672 ], [ "▁Cyber", -12.080883979797363 ], [ "hort", -12.080889701843262 ], [ "▁langue", -12.080915451049805 ], [ "führen", -12.08110523223877 ], [ "ganze", -12.081254005432129 ], [ "▁forte", -12.081327438354492 ], [ "▁Stefan", -12.081376075744629 ], [ "▁Jetzt", -12.081463813781738 ], [ "mehr", -12.081489562988281 ], [ "trip", -12.081549644470215 ], [ "▁poem", -12.081583976745605 ], [ "▁practitioners", -12.081720352172852 ], [ "▁connector", -12.08177661895752 ], [ "ECT", -12.081794738769531 ], [ "▁inseamna", -12.081820487976074 ], [ "addressing", 
-12.081867218017578 ], [ "▁beliebt", -12.081908226013184 ], [ "▁Mama", -12.082002639770508 ], [ "▁fade", -12.08204460144043 ], [ "messen", -12.08205509185791 ], [ "▁Visa", -12.082080841064453 ], [ "▁Meta", -12.082154273986816 ], [ "lene", -12.082188606262207 ], [ "▁remembered", -12.082334518432617 ], [ "/3", -12.082337379455566 ], [ "apte", -12.082347869873047 ], [ "▁uncomfortable", -12.082364082336426 ], [ "▁romance", -12.08253002166748 ], [ "▁réalis", -12.082601547241211 ], [ "▁Vincent", -12.082706451416016 ], [ "▁ABC", -12.08275318145752 ], [ "▁handicap", -12.082756042480469 ], [ "▁Shin", -12.082801818847656 ], [ "▁Hunde", -12.082847595214844 ], [ "▁Ach", -12.083131790161133 ], [ "▁Questions", -12.083136558532715 ], [ "▁particles", -12.083226203918457 ], [ "usch", -12.083230018615723 ], [ "▁SUV", -12.083279609680176 ], [ "▁Tous", -12.083301544189453 ], [ "▁empower", -12.08336067199707 ], [ "▁Yi", -12.083446502685547 ], [ "▁LinkedIn", -12.083453178405762 ], [ "▁Profile", -12.083507537841797 ], [ "▁surround", -12.083553314208984 ], [ "▁wh", -12.083560943603516 ], [ "▁Weiter", -12.083577156066895 ], [ "▁Weight", -12.083672523498535 ], [ "▁creatures", -12.083807945251465 ], [ "Especially", -12.08381462097168 ], [ "▁repede", -12.08383560180664 ], [ "▁albums", -12.083885192871094 ], [ "▁compatibil", -12.0839204788208 ], [ "▁Interesse", -12.083929061889648 ], [ "abili", -12.084062576293945 ], [ "▁roast", -12.084310531616211 ], [ "▁unii", -12.084310531616211 ], [ "▁Glad", -12.084421157836914 ], [ "▁enthusiasm", -12.084539413452148 ], [ "▁whisk", -12.084547996520996 ], [ "▁freezer", -12.084712982177734 ], [ "▁stolen", -12.084715843200684 ], [ "▁neighbour", -12.084883689880371 ], [ "▁sake", -12.084967613220215 ], [ "▁Effect", -12.0850191116333 ], [ "▁fighter", -12.085044860839844 ], [ "▁tranquil", -12.085084915161133 ], [ "▁organizer", -12.085199356079102 ], [ "pixel", -12.085306167602539 ], [ "▁Guest", -12.085338592529297 ], [ "▁Philipp", -12.085369110107422 ], [ "kunft", -12.085382461547852 ], [ "▁Meer", -12.085409164428711 ], [ "▁inviting", -12.085432052612305 ], [ "gänge", -12.085450172424316 ], [ "▁Position", -12.085627555847168 ], [ "giving", -12.085693359375 ], [ "▁marble", -12.085807800292969 ], [ "▁neg", -12.085813522338867 ], [ "▁Haar", -12.085914611816406 ], [ "Ein", -12.086039543151855 ], [ "▁buses", -12.086187362670898 ], [ "▁Lodge", -12.086188316345215 ], [ "soare", -12.086319923400879 ], [ "▁Barn", -12.086409568786621 ], [ "▁captain", -12.086527824401855 ], [ "▁Fix", -12.08657169342041 ], [ "ulate", -12.086629867553711 ], [ "ență", -12.086709022521973 ], [ "▁finances", -12.086770057678223 ], [ "▁VIP", -12.086800575256348 ], [ "▁Adams", -12.086801528930664 ], [ "▁spécialisé", -12.086960792541504 ], [ "▁fortunate", -12.087236404418945 ], [ "ility", -12.087345123291016 ], [ "▁democracy", -12.08749771118164 ], [ "shu", -12.087580680847168 ], [ "▁consiste", -12.087624549865723 ], [ "▁tort", -12.087692260742188 ], [ "▁branding", -12.087793350219727 ], [ "▁porch", -12.08780288696289 ], [ "UNI", -12.087867736816406 ], [ "▁placut", -12.087915420532227 ], [ "▁coupled", -12.088058471679688 ], [ "▁ministre", -12.088187217712402 ], [ "▁minerals", -12.088335037231445 ], [ "▁safer", -12.088335990905762 ], [ "▁outlets", -12.088438034057617 ], [ "▁caution", -12.08864688873291 ], [ "▁lightly", -12.0886869430542 ], [ "▁utilizator", -12.088700294494629 ], [ "▁Pala", -12.088959693908691 ], [ "▁doll", -12.088961601257324 ], [ "(1)", -12.089065551757812 ], [ "chol", -12.089120864868164 ], [ "▁Left", 
-12.08919620513916 ], [ "▁roulant", -12.089277267456055 ], [ "▁propune", -12.089301109313965 ], [ "▁Cred", -12.089339256286621 ], [ "▁negotiations", -12.089362144470215 ], [ "amba", -12.089393615722656 ], [ "▁grasp", -12.089420318603516 ], [ "▁Amsterdam", -12.089451789855957 ], [ "▁Zweck", -12.08945369720459 ], [ "▁conven", -12.089563369750977 ], [ "▁organizing", -12.089574813842773 ], [ "section", -12.089618682861328 ], [ "▁endeavor", -12.089634895324707 ], [ "▁basics", -12.089722633361816 ], [ "jud", -12.089874267578125 ], [ "▁yarn", -12.090049743652344 ], [ "▁shout", -12.09009075164795 ], [ "fällt", -12.090285301208496 ], [ "▁dragoste", -12.09054946899414 ], [ "▁Rein", -12.090594291687012 ], [ "Cal", -12.090688705444336 ], [ "▁deaths", -12.090729713439941 ], [ "▁24,", -12.0907564163208 ], [ "▁măr", -12.090773582458496 ], [ "server", -12.090825080871582 ], [ "▁explic", -12.09085464477539 ], [ "▁sufer", -12.090903282165527 ], [ "▁lucrări", -12.091097831726074 ], [ "▁Disease", -12.091126441955566 ], [ "▁prescribed", -12.091194152832031 ], [ "prozess", -12.091285705566406 ], [ "▁dessin", -12.091343879699707 ], [ "▁refuge", -12.091473579406738 ], [ "▁cope", -12.091631889343262 ], [ "pole", -12.09196949005127 ], [ "▁vacant", -12.091984748840332 ], [ "▁sezon", -12.092035293579102 ], [ "▁Carbon", -12.092227935791016 ], [ "▁goût", -12.092233657836914 ], [ "Ste", -12.092320442199707 ], [ "▁surroundings", -12.092754364013672 ], [ "definite", -12.09284496307373 ], [ "▁adaptation", -12.093358993530273 ], [ "cteur", -12.0933837890625 ], [ "System", -12.093442916870117 ], [ "▁Burg", -12.093550682067871 ], [ "▁retention", -12.093579292297363 ], [ "examen", -12.093618392944336 ], [ "▁adjustments", -12.093668937683105 ], [ "nies", -12.094213485717773 ], [ "▁RSS", -12.094215393066406 ], [ "▁Umwelt", -12.094259262084961 ], [ "▁strengths", -12.094326972961426 ], [ "loom", -12.094401359558105 ], [ "▁pics", -12.094404220581055 ], [ "phase", -12.09443187713623 ], [ "▁Poland", -12.094472885131836 ], [ "▁practicing", -12.094558715820312 ], [ "monetary", -12.094756126403809 ], [ "▁embodiment", -12.094756126403809 ], [ "▁jocuri", -12.094846725463867 ], [ "▁impreuna", -12.094939231872559 ], [ "▁Lyon", -12.094985961914062 ], [ "keeping", -12.095157623291016 ], [ "▁Starting", -12.095202445983887 ], [ "▁începe", -12.095357894897461 ], [ "▁clay", -12.095440864562988 ], [ "bildung", -12.095444679260254 ], [ "Technologie", -12.095513343811035 ], [ "toxic", -12.095624923706055 ], [ "▁gasit", -12.095819473266602 ], [ "rott", -12.095870018005371 ], [ "brook", -12.095935821533203 ], [ "▁wann", -12.096029281616211 ], [ "▁lined", -12.09610366821289 ], [ "▁Chelsea", -12.096223831176758 ], [ "▁Orlando", -12.096224784851074 ], [ "▁Otherwise", -12.096267700195312 ], [ "▁debit", -12.096273422241211 ], [ "▁entsprechend", -12.09648323059082 ], [ "nism", -12.09654426574707 ], [ "issen", -12.09664535522461 ], [ "▁rendez", -12.096646308898926 ], [ "▁processus", -12.096745491027832 ], [ "mbi", -12.096890449523926 ], [ "▁Graduate", -12.096960067749023 ], [ "▁cozy", -12.097119331359863 ], [ "▁Freunde", -12.097320556640625 ], [ "▁teme", -12.097389221191406 ], [ "▁bias", -12.097548484802246 ], [ "102", -12.09756851196289 ], [ "terrorism", -12.09770679473877 ], [ "threatening", -12.097756385803223 ], [ "ни", -12.097776412963867 ], [ "▁Sonntag", -12.098062515258789 ], [ "▁efect", -12.098116874694824 ], [ "▁prayers", -12.098134994506836 ], [ "▁backpack", -12.09841537475586 ], [ "?)", -12.098489761352539 ], [ "▁searches", -12.098788261413574 ], 
[ "ouverture", -12.09880256652832 ], [ "▁sustained", -12.098865509033203 ], [ "hawk", -12.098869323730469 ], [ "messe", -12.098958969116211 ], [ "▁prototype", -12.098989486694336 ], [ "▁stră", -12.09903335571289 ], [ "▁Neo", -12.099040985107422 ], [ "▁29,", -12.099109649658203 ], [ "izo", -12.099306106567383 ], [ "▁Anton", -12.099333763122559 ], [ "SIS", -12.099564552307129 ], [ "pendant", -12.099617958068848 ], [ "▁passive", -12.099813461303711 ], [ "▁Aaron", -12.099824905395508 ], [ "▁Karen", -12.099831581115723 ], [ "▁Bildung", -12.09994888305664 ], [ "ario", -12.099949836730957 ], [ "▁regulator", -12.100006103515625 ], [ "gruppe", -12.100032806396484 ], [ "stepped", -12.100053787231445 ], [ "▁interventions", -12.10014533996582 ], [ "▁rounds", -12.100149154663086 ], [ "▁Khan", -12.10020637512207 ], [ "▁railway", -12.10028076171875 ], [ "▁souvenir", -12.100296974182129 ], [ "▁Plans", -12.100336074829102 ], [ "aille", -12.100372314453125 ], [ "▁billing", -12.100473403930664 ], [ "▁Spiele", -12.100541114807129 ], [ "▁supermarket", -12.100556373596191 ], [ "▁flows", -12.100625991821289 ], [ "▁PayPal", -12.100641250610352 ], [ "▁tribe", -12.10067081451416 ], [ "anni", -12.100780487060547 ], [ "▁rides", -12.100934982299805 ], [ "▁Orleans", -12.101009368896484 ], [ "▁evaluated", -12.101021766662598 ], [ "founder", -12.10106372833252 ], [ "▁Feld", -12.101212501525879 ], [ "▁altele", -12.10122299194336 ], [ "▁thermo", -12.101290702819824 ], [ "ugh", -12.101330757141113 ], [ "▁adus", -12.101375579833984 ], [ "▁Taiwan", -12.101396560668945 ], [ "▁clause", -12.101409912109375 ], [ "oxi", -12.101465225219727 ], [ "alcool", -12.101495742797852 ], [ "▁Noi", -12.101531982421875 ], [ "rub", -12.101540565490723 ], [ "▁dosar", -12.101582527160645 ], [ "▁Nelson", -12.101751327514648 ], [ "fassung", -12.102316856384277 ], [ "▁Kill", -12.102489471435547 ], [ "▁Standards", -12.102490425109863 ], [ "▁upward", -12.102653503417969 ], [ "▁Coloring", -12.102664947509766 ], [ "Designed", -12.102754592895508 ], [ "▁Nou", -12.10281753540039 ], [ "▁borrow", -12.102940559387207 ], [ "▁Poll", -12.10321044921875 ], [ "▁antibiotic", -12.103277206420898 ], [ "▁fabrication", -12.103388786315918 ], [ "quo", -12.103432655334473 ], [ "▁crimes", -12.103464126586914 ], [ "▁nahe", -12.103484153747559 ], [ "▁aplicat", -12.103565216064453 ], [ "OST", -12.1035737991333 ], [ "▁Beijing", -12.103599548339844 ], [ "fight", -12.103612899780273 ], [ "▁lodge", -12.103612899780273 ], [ "dreh", -12.103922843933105 ], [ "▁harness", -12.104036331176758 ], [ "▁noiembrie", -12.104151725769043 ], [ "ounded", -12.104161262512207 ], [ "▁Imp", -12.1041841506958 ], [ "▁nächste", -12.104275703430176 ], [ "funktion", -12.104476928710938 ], [ "exploitation", -12.104569435119629 ], [ "▁Ready", -12.10457706451416 ], [ "▁Plate", -12.104598999023438 ], [ "▁octombrie", -12.104706764221191 ], [ "▁considerat", -12.104982376098633 ], [ "▁Xbox", -12.105067253112793 ], [ "mind", -12.105107307434082 ], [ "▁Lind", -12.105111122131348 ], [ "runde", -12.105352401733398 ], [ "mination", -12.105374336242676 ], [ "▁memori", -12.105377197265625 ], [ "▁cere", -12.105389595031738 ], [ "barkeit", -12.105517387390137 ], [ "▁găsi", -12.105761528015137 ], [ "2.1", -12.105863571166992 ], [ "▁Finding", -12.105891227722168 ], [ "▁static", -12.106405258178711 ], [ "court", -12.106439590454102 ], [ "▁Gem", -12.106489181518555 ], [ "▁pièce", -12.106494903564453 ], [ "▁reel", -12.10651969909668 ], [ "▁manuscript", -12.106560707092285 ], [ "▁complications", -12.106578826904297 ], [ 
"▁controlling", -12.106585502624512 ], [ "▁favour", -12.106738090515137 ], [ "▁advancement", -12.106739044189453 ], [ "▁Radi", -12.106870651245117 ], [ "▁faites", -12.107076644897461 ], [ "▁ordin", -12.107131958007812 ], [ "sorted", -12.107152938842773 ], [ "▁1982", -12.10715389251709 ], [ "▁brutal", -12.107154846191406 ], [ "▁Guy", -12.107226371765137 ], [ "▁accomplishment", -12.107248306274414 ], [ "▁wer", -12.107329368591309 ], [ "▁withdraw", -12.107460975646973 ], [ "abilitate", -12.1075439453125 ], [ "▁NBA", -12.107625961303711 ], [ "▁Benefit", -12.107675552368164 ], [ "▁divide", -12.107824325561523 ], [ "induced", -12.107913970947266 ], [ "▁văzut", -12.108049392700195 ], [ "▁peel", -12.10807991027832 ], [ "▁joints", -12.108160972595215 ], [ "▁enthalten", -12.108301162719727 ], [ "▁spy", -12.108397483825684 ], [ "▁occasional", -12.108437538146973 ], [ "warm", -12.108514785766602 ], [ "ême", -12.108542442321777 ], [ "▁Betriebs", -12.108551979064941 ], [ "▁Ioan", -12.1087064743042 ], [ "▁balloon", -12.108809471130371 ], [ "▁leap", -12.108869552612305 ], [ "pelled", -12.109000205993652 ], [ "▁realise", -12.109073638916016 ], [ "▁Retail", -12.109118461608887 ], [ "▁Farben", -12.109151840209961 ], [ "▁Kennedy", -12.10916519165039 ], [ "▁Firma", -12.109196662902832 ], [ "▁tineri", -12.10934066772461 ], [ "tub", -12.109354019165039 ], [ "PORT", -12.109381675720215 ], [ "▁stiff", -12.109416007995605 ], [ "▁notable", -12.109476089477539 ], [ "tler", -12.109498023986816 ], [ "▁utile", -12.10958480834961 ], [ "▁jouer", -12.109674453735352 ], [ "▁Primary", -12.109735488891602 ], [ "▁retailer", -12.109764099121094 ], [ "▁jederzeit", -12.109808921813965 ], [ "▁amend", -12.109817504882812 ], [ "▁sagte", -12.109845161437988 ], [ "atch", -12.10995864868164 ], [ "ution", -12.110008239746094 ], [ "once", -12.110018730163574 ], [ "ended", -12.1100435256958 ], [ "▁literary", -12.11013126373291 ], [ "▁wrist", -12.110281944274902 ], [ "vii", -12.11036205291748 ], [ "scriere", -12.110367774963379 ], [ "▁compassion", -12.110443115234375 ], [ "▁Milan", -12.110474586486816 ], [ "▁Dach", -12.110490798950195 ], [ "▁problèmes", -12.110630989074707 ], [ "▁Pré", -12.110687255859375 ], [ "▁Feder", -12.110759735107422 ], [ "Dr", -12.110814094543457 ], [ "Spr", -12.110908508300781 ], [ "▁né", -12.110969543457031 ], [ "François", -12.111023902893066 ], [ "▁Shu", -12.111115455627441 ], [ "▁poison", -12.111154556274414 ], [ "zier", -12.111176490783691 ], [ "▁attain", -12.11124038696289 ], [ "▁switching", -12.111310958862305 ], [ "▁vibration", -12.111348152160645 ], [ "▁Tablet", -12.11136531829834 ], [ "▁Lern", -12.11148452758789 ], [ "offrir", -12.111660957336426 ], [ "123", -12.11168098449707 ], [ "cheapest", -12.11173152923584 ], [ "▁numărul", -12.111764907836914 ], [ "break", -12.11180305480957 ], [ "cyto", -12.111836433410645 ], [ "▁Mississippi", -12.111955642700195 ], [ "▁dragon", -12.11207389831543 ], [ "fir", -12.112176895141602 ], [ "▁fête", -12.112180709838867 ], [ "▁Wait", -12.112350463867188 ], [ "buy", -12.112359046936035 ], [ "având", -12.112391471862793 ], [ "▁Scar", -12.112517356872559 ], [ "▁Hund", -12.112586975097656 ], [ "bug", -12.112807273864746 ], [ "▁classique", -12.112811088562012 ], [ "▁tenant", -12.112860679626465 ], [ "▁Walt", -12.11296272277832 ], [ "▁timber", -12.11296272277832 ], [ "inscription", -12.11300277709961 ], [ "BD", -12.113016128540039 ], [ "▁Commissioner", -12.113018989562988 ], [ "▁casinos", -12.11306095123291 ], [ "▁prochain", -12.113168716430664 ], [ "▁rustic", -12.11349868774414 
], [ "▁Kent", -12.113607406616211 ], [ "▁Deci", -12.113761901855469 ], [ "ли", -12.113855361938477 ], [ "▁crossed", -12.113861083984375 ], [ "▁delightful", -12.113869667053223 ], [ "▁metres", -12.113872528076172 ], [ "▁scandal", -12.113906860351562 ], [ "▁activitate", -12.113986015319824 ], [ "▁nimeni", -12.114009857177734 ], [ "ease", -12.11402416229248 ], [ "▁revenues", -12.1140775680542 ], [ "▁partially", -12.114187240600586 ], [ "AE", -12.114263534545898 ], [ "nique", -12.114410400390625 ], [ "▁fixtures", -12.114426612854004 ], [ "▁pupils", -12.114694595336914 ], [ "Lib", -12.11471176147461 ], [ "analyse", -12.114739418029785 ], [ "▁Oracle", -12.114767074584961 ], [ "troph", -12.114859580993652 ], [ "▁detected", -12.114879608154297 ], [ "▁servant", -12.11507797241211 ], [ "▁badly", -12.115121841430664 ], [ "comparing", -12.115150451660156 ], [ "abs", -12.115238189697266 ], [ "▁fotografi", -12.115443229675293 ], [ "▁Million", -12.115541458129883 ], [ "▁Gordon", -12.11557388305664 ], [ "▁Smok", -12.115592002868652 ], [ "▁Essay", -12.11565113067627 ], [ "eptic", -12.115665435791016 ], [ "▁Transportation", -12.115728378295898 ], [ "/2019", -12.115767478942871 ], [ "▁alignment", -12.115778923034668 ], [ "▁laut", -12.11578369140625 ], [ "stände", -12.115791320800781 ], [ "▁concerts", -12.115811347961426 ], [ "▁weekends", -12.11589241027832 ], [ "▁obstacles", -12.115941047668457 ], [ "wür", -12.115964889526367 ], [ "▁Fisher", -12.116219520568848 ], [ "▁supervisor", -12.116242408752441 ], [ "▁traders", -12.116262435913086 ], [ "▁scary", -12.116484642028809 ], [ "▁Grove", -12.116538047790527 ], [ "▁expose", -12.116583824157715 ], [ "▁enemies", -12.116630554199219 ], [ "▁Lux", -12.11667537689209 ], [ "▁Berufs", -12.11672306060791 ], [ "▁Sheet", -12.116780281066895 ], [ "▁Natürlich", -12.116819381713867 ], [ "▁examined", -12.116886138916016 ], [ "pursuing", -12.116920471191406 ], [ "▁pools", -12.116923332214355 ], [ "▁Thompson", -12.117005348205566 ], [ "▁SAP", -12.117010116577148 ], [ "claiming", -12.117053985595703 ], [ "buried", -12.117055892944336 ], [ "assurance", -12.117138862609863 ], [ "▁sandwich", -12.117195129394531 ], [ "uber", -12.117310523986816 ], [ "▁laisse", -12.117321968078613 ], [ "peak", -12.117348670959473 ], [ "spring", -12.1173677444458 ], [ "▁august", -12.117369651794434 ], [ "▁benötigt", -12.11738109588623 ], [ "▁achievements", -12.117470741271973 ], [ "coala", -12.117478370666504 ], [ "▁scr", -12.117842674255371 ], [ "gesagt", -12.118122100830078 ], [ "▁envelope", -12.118141174316406 ], [ "▁mapping", -12.118169784545898 ], [ "▁Suche", -12.118298530578613 ], [ "first", -12.118329048156738 ], [ "▁Quin", -12.118447303771973 ], [ "räu", -12.118561744689941 ], [ "▁răs", -12.118583679199219 ], [ "chemical", -12.118597984313965 ], [ "dad", -12.118927955627441 ], [ "formation", -12.118983268737793 ], [ "▁cushion", -12.119026184082031 ], [ "▁Maß", -12.119046211242676 ], [ "07.", -12.119184494018555 ], [ "▁perioadă", -12.119257926940918 ], [ "▁Wunsch", -12.11925983428955 ], [ "▁joi", -12.119423866271973 ], [ "▁$25", -12.119482040405273 ], [ "▁uploaded", -12.11952018737793 ], [ "▁hobby", -12.119633674621582 ], [ "▁septembrie", -12.119633674621582 ], [ "▁Dimension", -12.119634628295898 ], [ "▁domeniu", -12.119661331176758 ], [ "▁Tourism", -12.119747161865234 ], [ "▁fais", -12.119800567626953 ], [ "aches", -12.119919776916504 ], [ "neck", -12.119969367980957 ], [ "▁Chip", -12.119982719421387 ], [ "▁Tisch", -12.1199951171875 ], [ "▁Pai", -12.120006561279297 ], [ "▁Butter", 
-12.120083808898926 ], [ "▁altor", -12.120133399963379 ], [ "cultural", -12.120182991027832 ], [ "▁bases", -12.12028980255127 ], [ "▁Christopher", -12.120396614074707 ], [ "Kindle", -12.120401382446289 ], [ "▁bathrooms", -12.12049388885498 ], [ "▁civilian", -12.12052059173584 ], [ "▁Architecture", -12.12058162689209 ], [ "heiten", -12.120641708374023 ], [ "otte", -12.120763778686523 ], [ "ри", -12.120784759521484 ], [ "wash", -12.120792388916016 ], [ "▁evenimente", -12.12086296081543 ], [ "lade", -12.121132850646973 ], [ "▁ermöglicht", -12.121140480041504 ], [ "Port", -12.121149063110352 ], [ "▁Horn", -12.12119197845459 ], [ "▁Housing", -12.121232032775879 ], [ "▁Profit", -12.121304512023926 ], [ "▁stressed", -12.12136459350586 ], [ "▁70%", -12.121431350708008 ], [ "laying", -12.121458053588867 ], [ "▁specialize", -12.121490478515625 ], [ "▁Published", -12.121519088745117 ], [ "corp", -12.121554374694824 ], [ "▁revision", -12.121611595153809 ], [ "▁sail", -12.121804237365723 ], [ "courtesy", -12.121909141540527 ], [ "tax", -12.1219482421875 ], [ "▁perfekt", -12.122018814086914 ], [ "▁Risk", -12.122088432312012 ], [ "▁chaleur", -12.122129440307617 ], [ "ych", -12.122132301330566 ], [ "▁spine", -12.12218189239502 ], [ "▁holders", -12.122264862060547 ], [ "▁Speaking", -12.122271537780762 ], [ "▁Bernard", -12.122400283813477 ], [ "incarc", -12.122532844543457 ], [ "shalb", -12.122639656066895 ], [ "Potrivit", -12.12264633178711 ], [ "arising", -12.122654914855957 ], [ "▁kingdom", -12.122665405273438 ], [ "▁potato", -12.122766494750977 ], [ "▁promoted", -12.122814178466797 ], [ "▁judges", -12.1228609085083 ], [ "▁naturelle", -12.122992515563965 ], [ "▁Kindern", -12.123022079467773 ], [ "schicht", -12.123047828674316 ], [ "▁Drag", -12.123066902160645 ], [ "atta", -12.123132705688477 ], [ "soient", -12.123249053955078 ], [ "INS", -12.12336540222168 ], [ "▁legislative", -12.123642921447754 ], [ "▁teens", -12.123785018920898 ], [ "▁Fotos", -12.123842239379883 ], [ "▁illustrations", -12.12392520904541 ], [ "möglichkeiten", -12.12415599822998 ], [ "Votre", -12.124194145202637 ], [ "▁tarif", -12.124195098876953 ], [ "cli", -12.124488830566406 ], [ "▁landlord", -12.12473201751709 ], [ "cine", -12.124743461608887 ], [ "▁bot", -12.124798774719238 ], [ "enhancing", -12.12491226196289 ], [ "▁März", -12.12491226196289 ], [ "▁succès", -12.125106811523438 ], [ "▁disclose", -12.125120162963867 ], [ "▁Geräte", -12.125321388244629 ], [ "▁Magn", -12.125422477722168 ], [ "dessous", -12.12580680847168 ], [ "▁miracle", -12.125862121582031 ], [ "▁travailler", -12.125933647155762 ], [ "▁herb", -12.125945091247559 ], [ "-01", -12.126049041748047 ], [ "litre", -12.126104354858398 ], [ "▁tău", -12.126120567321777 ], [ "ACC", -12.126190185546875 ], [ "▁diminu", -12.126275062561035 ], [ "itzer", -12.126317024230957 ], [ "▁personenbezogen", -12.126395225524902 ], [ "▁Pure", -12.126436233520508 ], [ "▁influences", -12.12668228149414 ], [ "ană", -12.126765251159668 ], [ "▁proposer", -12.126856803894043 ], [ "▁longest", -12.12692642211914 ], [ "euses", -12.127080917358398 ], [ "/1", -12.127487182617188 ], [ "hafte", -12.127716064453125 ], [ "▁Dich", -12.127761840820312 ], [ "▁candle", -12.128026962280273 ], [ "ouche", -12.128191947937012 ], [ "installation", -12.128241539001465 ], [ "▁Includes", -12.128280639648438 ], [ "▁entfernt", -12.12831974029541 ], [ "traf", -12.128499031066895 ], [ "▁None", -12.128508567810059 ], [ "▁produc", -12.128510475158691 ], [ "held", -12.128519058227539 ], [ "graphic", -12.128531455993652 ], [ 
"▁demographic", -12.128584861755371 ], [ "ingham", -12.1287841796875 ], [ "schul", -12.128812789916992 ], [ "▁sneak", -12.128843307495117 ], [ "laub", -12.128889083862305 ], [ "▁thickness", -12.12911605834961 ], [ "▁killer", -12.129297256469727 ], [ "▁entsprechende", -12.129344940185547 ], [ "▁theft", -12.129396438598633 ], [ "▁Jerusalem", -12.129457473754883 ], [ "Adapt", -12.129495620727539 ], [ "▁updating", -12.129497528076172 ], [ "tete", -12.12954330444336 ], [ "▁warming", -12.129701614379883 ], [ "anlage", -12.129739761352539 ], [ "▁lenders", -12.129814147949219 ], [ "mobile", -12.130008697509766 ], [ "▁Package", -12.130080223083496 ], [ "▁Volume", -12.130152702331543 ], [ "---", -12.130167007446289 ], [ "▁Others", -12.130173683166504 ], [ "content", -12.130188941955566 ], [ "tement", -12.130253791809082 ], [ "bildet", -12.13027572631836 ], [ "▁washer", -12.13053035736084 ], [ "▁freelance", -12.130623817443848 ], [ "▁fein", -12.130753517150879 ], [ "▁catering", -12.130851745605469 ], [ "▁warmth", -12.130911827087402 ], [ "▁Month", -12.131103515625 ], [ "▁Federation", -12.131134033203125 ], [ "▁editorial", -12.13121223449707 ], [ "▁Shopping", -12.131241798400879 ], [ "▁efort", -12.131296157836914 ], [ "▁damp", -12.131314277648926 ], [ "▁declined", -12.131332397460938 ], [ "▁1978", -12.13135051727295 ], [ "6,000", -12.131355285644531 ], [ "location", -12.131551742553711 ], [ "▁blogger", -12.131572723388672 ], [ "▁goodness", -12.131826400756836 ], [ "▁Purchase", -12.132119178771973 ], [ "▁suspended", -12.132159233093262 ], [ "▁assessed", -12.132201194763184 ], [ "rada", -12.132286071777344 ], [ "▁Lac", -12.132291793823242 ], [ "▁angeboten", -12.13235092163086 ], [ "▁Wetter", -12.132370948791504 ], [ "ores", -12.13243579864502 ], [ "▁fourni", -12.132476806640625 ], [ "▁retire", -12.13269329071045 ], [ "▁Baptist", -12.132741928100586 ], [ "▁Saison", -12.13277530670166 ], [ "Bar", -12.132794380187988 ], [ "▁dossier", -12.132979393005371 ], [ "brow", -12.133044242858887 ], [ "▁Kaffee", -12.133071899414062 ], [ "-25", -12.133463859558105 ], [ "▁festivals", -12.133599281311035 ], [ "▁sellers", -12.133716583251953 ], [ "Ü", -12.13393783569336 ], [ "▁publisher", -12.133960723876953 ], [ "▁Designs", -12.133970260620117 ], [ "▁putut", -12.13400936126709 ], [ "▁Built", -12.134417533874512 ], [ "▁recreational", -12.134476661682129 ], [ "▁european", -12.134514808654785 ], [ "▁binary", -12.134631156921387 ], [ "▁Nieder", -12.134764671325684 ], [ "taking", -12.1348237991333 ], [ "▁Lots", -12.13494873046875 ], [ "▁recognised", -12.135031700134277 ], [ "ssant", -12.135063171386719 ], [ "ITE", -12.135271072387695 ], [ "oom", -12.135298728942871 ], [ "▁Kre", -12.135310173034668 ], [ "▁pipes", -12.135631561279297 ], [ "▁hinge", -12.135653495788574 ], [ "▁enterprises", -12.135664939880371 ], [ "▁texts", -12.13583755493164 ], [ "Organiz", -12.136080741882324 ], [ "▁suivre", -12.136124610900879 ], [ "noc", -12.136157989501953 ], [ "fair", -12.136194229125977 ], [ "▁darkness", -12.136305809020996 ], [ "▁Whi", -12.13631534576416 ], [ "natural", -12.136321067810059 ], [ "Bas", -12.136422157287598 ], [ "▁tribute", -12.136443138122559 ], [ "▁Naţional", -12.136573791503906 ], [ "hara", -12.136622428894043 ], [ "▁catégorie", -12.136697769165039 ], [ "▁Schedule", -12.136698722839355 ], [ "▁lernen", -12.13671875 ], [ "▁Plastic", -12.136725425720215 ], [ "▁giveaway", -12.13675594329834 ], [ "▁Ideen", -12.136906623840332 ], [ "▁circa", -12.13718032836914 ], [ "▁lice", -12.137242317199707 ], [ "▁Meinung", 
-12.137264251708984 ], [ "▁beside", -12.137566566467285 ], [ "▁vazut", -12.137673377990723 ], [ "strom", -12.137749671936035 ], [ "boro", -12.137775421142578 ], [ "▁Soon", -12.137796401977539 ], [ "dozens", -12.137896537780762 ], [ "▁Arena", -12.137943267822266 ], [ "▁viața", -12.137989044189453 ], [ "▁Impact", -12.138082504272461 ], [ "current", -12.138106346130371 ], [ "FM", -12.138117790222168 ], [ "▁coil", -12.138657569885254 ], [ "gold", -12.138679504394531 ], [ "▁spate", -12.138679504394531 ], [ "1.4", -12.13875675201416 ], [ "solution", -12.138769149780273 ], [ "▁Wayne", -12.138835906982422 ], [ "▁queen", -12.138898849487305 ], [ "illion", -12.139022827148438 ], [ "greifen", -12.139127731323242 ], [ "▁Bil", -12.139174461364746 ], [ "rote", -12.139185905456543 ], [ "END", -12.13918685913086 ], [ "äl", -12.139206886291504 ], [ "▁reçu", -12.139378547668457 ], [ "flower", -12.139495849609375 ], [ "▁draws", -12.139519691467285 ], [ "plant", -12.139605522155762 ], [ "2010", -12.139702796936035 ], [ "▁oper", -12.139762878417969 ], [ "▁conserve", -12.139777183532715 ], [ "▁sprinkle", -12.13984203338623 ], [ "mode", -12.139924049377441 ], [ "▁lifting", -12.139941215515137 ], [ "▁Institution", -12.139951705932617 ], [ "Când", -12.14001750946045 ], [ "Aus", -12.140048027038574 ], [ "▁fears", -12.140054702758789 ], [ "▁appointments", -12.140079498291016 ], [ "oarele", -12.140162467956543 ], [ "▁duck", -12.140193939208984 ], [ "▁stadium", -12.140213012695312 ], [ "▁vezi", -12.140227317810059 ], [ "▁lap", -12.140315055847168 ], [ "▁proceeds", -12.140382766723633 ], [ "geschlossen", -12.140412330627441 ], [ "▁tren", -12.140478134155273 ], [ "VS", -12.140536308288574 ], [ "▁vais", -12.140800476074219 ], [ "ținut", -12.140859603881836 ], [ "▁Concert", -12.140928268432617 ], [ "▁planting", -12.141008377075195 ], [ "▁honour", -12.141069412231445 ], [ "▁gras", -12.141071319580078 ], [ "woo", -12.141092300415039 ], [ "▁Hero", -12.141282081604004 ], [ "▁stimulate", -12.14134407043457 ], [ "▁überhaupt", -12.141426086425781 ], [ "▁bounce", -12.14148235321045 ], [ "oodle", -12.14151382446289 ], [ "▁packs", -12.141576766967773 ], [ "▁Poker", -12.14158821105957 ], [ "▁acea", -12.141684532165527 ], [ "▁parish", -12.141754150390625 ], [ "-24", -12.141766548156738 ], [ "▁iTunes", -12.141874313354492 ], [ "▁lumière", -12.141948699951172 ], [ "third", -12.142024993896484 ], [ "▁dynamics", -12.142038345336914 ], [ "Unless", -12.142162322998047 ], [ "▁immense", -12.142416000366211 ], [ "▁Sec", -12.142781257629395 ], [ "lois", -12.143009185791016 ], [ "époque", -12.14302921295166 ], [ "NB", -12.143139839172363 ], [ "written", -12.143210411071777 ], [ "▁logement", -12.143226623535156 ], [ "submitting", -12.143295288085938 ], [ "▁Quand", -12.14331340789795 ], [ "▁foi", -12.143322944641113 ], [ "▁catalogue", -12.143351554870605 ], [ "nova", -12.14343547821045 ], [ "▁prezentat", -12.143527030944824 ], [ "▁tart", -12.143877983093262 ], [ "те", -12.143912315368652 ], [ "hack", -12.143916130065918 ], [ "▁Politic", -12.144003868103027 ], [ "▁18,", -12.144048690795898 ], [ "▁ignored", -12.144145965576172 ], [ "▁spoon", -12.144245147705078 ], [ "▁Joy", -12.144280433654785 ], [ "▁reside", -12.144482612609863 ], [ ".99", -12.144488334655762 ], [ "lytic", -12.144625663757324 ], [ "▁bogat", -12.144643783569336 ], [ "▁nurses", -12.144845008850098 ], [ "▁funcţi", -12.145029067993164 ], [ "▁produselor", -12.145038604736328 ], [ "▁Associates", -12.145069122314453 ], [ "Est", -12.14511489868164 ], [ "▁peanut", -12.145187377929688 ], [ 
"▁résultat", -12.145257949829102 ], [ "08.", -12.145424842834473 ], [ "▁Astro", -12.145439147949219 ], [ "▁personnelle", -12.145527839660645 ], [ "320", -12.145668983459473 ], [ "▁Grab", -12.145748138427734 ], [ "éco", -12.145801544189453 ], [ "▁clasic", -12.145857810974121 ], [ "offre", -12.14588451385498 ], [ "▁idee", -12.14589786529541 ], [ "▁cheat", -12.146259307861328 ], [ "▁Flug", -12.146286964416504 ], [ "▁1500", -12.146413803100586 ], [ "▁kurze", -12.14643383026123 ], [ "With", -12.146512985229492 ], [ "▁Half", -12.146575927734375 ], [ "▁disciplines", -12.146642684936523 ], [ "sorption", -12.14669132232666 ], [ "▁greutate", -12.146927833557129 ], [ "mä", -12.146940231323242 ], [ "▁Literatur", -12.146956443786621 ], [ "3/", -12.147016525268555 ], [ "4.0", -12.147095680236816 ], [ "▁déco", -12.147119522094727 ], [ "▁Fuß", -12.147233963012695 ], [ "▁Deutsche", -12.147289276123047 ], [ "▁abundance", -12.14746379852295 ], [ "▁Luther", -12.14750862121582 ], [ "▁nutritional", -12.147562980651855 ], [ "▁Jude", -12.147687911987305 ], [ "AY", -12.14786148071289 ], [ "▁chore", -12.147916793823242 ], [ "▁Kro", -12.148006439208984 ], [ "▁alin", -12.14801025390625 ], [ "lösung", -12.148030281066895 ], [ "▁geworden", -12.148238182067871 ], [ "▁sociaux", -12.148255348205566 ], [ "▁Spark", -12.1486177444458 ], [ "▁phenomenon", -12.148624420166016 ], [ "ICA", -12.148805618286133 ], [ "▁Ran", -12.148836135864258 ], [ "▁Schwarz", -12.148959159851074 ], [ "▁1983", -12.148985862731934 ], [ "ет", -12.148990631103516 ], [ "möglich", -12.149084091186523 ], [ "vocation", -12.149087905883789 ], [ "▁Organic", -12.14926815032959 ], [ "Oh", -12.149408340454102 ], [ "▁blockchain", -12.149422645568848 ], [ "▁Bă", -12.149515151977539 ], [ "▁Bass", -12.14953899383545 ], [ "enie", -12.149687767028809 ], [ "▁rêve", -12.149807929992676 ], [ "▁Rap", -12.149986267089844 ], [ "▁democratic", -12.150044441223145 ], [ "▁Chart", -12.150167465209961 ], [ "▁Voi", -12.150189399719238 ], [ "process", -12.150263786315918 ], [ "▁preach", -12.150389671325684 ], [ "tient", -12.150456428527832 ], [ "▁Train", -12.150468826293945 ], [ "▁Reihe", -12.150472640991211 ], [ "help", -12.150514602661133 ], [ "1.6", -12.150547981262207 ], [ "▁cazuri", -12.150547981262207 ], [ "▁chap", -12.150559425354004 ], [ "aktiv", -12.150632858276367 ], [ "▁2006.", -12.15079116821289 ], [ "iene", -12.150849342346191 ], [ "▁BBQ", -12.150969505310059 ], [ "dauer", -12.151028633117676 ], [ "2).", -12.151226997375488 ], [ "▁Monat", -12.151277542114258 ], [ "Generally", -12.151285171508789 ], [ "▁bracelet", -12.151336669921875 ], [ "▁cartoon", -12.151349067687988 ], [ "▁pui", -12.151488304138184 ], [ "temp", -12.151506423950195 ], [ "▁Particip", -12.151555061340332 ], [ "▁dumneavoastră", -12.151725769042969 ], [ "▁Gin", -12.151824951171875 ], [ "iunile", -12.151829719543457 ], [ "reise", -12.151849746704102 ], [ "▁einzige", -12.15189266204834 ], [ "ANCE", -12.15192985534668 ], [ "▁humble", -12.151951789855957 ], [ "claim", -12.152093887329102 ], [ "LV", -12.152143478393555 ], [ "▁confiance", -12.152270317077637 ], [ "▁Trading", -12.152535438537598 ], [ "▁Fabric", -12.152770042419434 ], [ "▁Duke", -12.152851104736328 ], [ "spieler", -12.152937889099121 ], [ "▁reject", -12.152987480163574 ], [ "▁crise", -12.153170585632324 ], [ "▁borders", -12.153196334838867 ], [ "▁Vehicle", -12.153279304504395 ], [ "zeiten", -12.153481483459473 ], [ "enrolled", -12.153514862060547 ], [ "venue", -12.153555870056152 ], [ "▁forests", -12.153564453125 ], [ "vascular", 
-12.15358829498291 ], [ "▁phrases", -12.153661727905273 ], [ "▁receptor", -12.15368366241455 ], [ "schied", -12.153687477111816 ], [ "▁soirée", -12.153785705566406 ], [ "▁partener", -12.153987884521484 ], [ "▁Jobs", -12.15417194366455 ], [ "▁segments", -12.154216766357422 ], [ "▁violate", -12.154438972473145 ], [ "▁viable", -12.154500007629395 ], [ "▁encountered", -12.154533386230469 ], [ "▁travelers", -12.154552459716797 ], [ "▁împ", -12.154679298400879 ], [ "▁convince", -12.154693603515625 ], [ "▁mailing", -12.154693603515625 ], [ "▁Zahn", -12.154698371887207 ], [ "attend", -12.15477466583252 ], [ "▁eBay", -12.154836654663086 ], [ "▁Emergency", -12.154844284057617 ], [ "wirtschaft", -12.154882431030273 ], [ "▁scholars", -12.154947280883789 ], [ "▁considerably", -12.155118942260742 ], [ "▁combo", -12.1551513671875 ], [ "hiver", -12.155198097229004 ], [ "▁mysterious", -12.15522575378418 ], [ "▁Degree", -12.155234336853027 ], [ "▁fate", -12.155242919921875 ], [ "▁transplant", -12.155281066894531 ], [ "▁samedi", -12.155400276184082 ], [ "unit", -12.155519485473633 ], [ "▁moyenne", -12.155611991882324 ], [ "▁Liverpool", -12.155614852905273 ], [ "▁Champions", -12.155728340148926 ], [ "zzle", -12.155824661254883 ], [ "▁arena", -12.156228065490723 ], [ "▁Pipe", -12.15633487701416 ], [ "▁waterproof", -12.156356811523438 ], [ "▁eternal", -12.156463623046875 ], [ "Whenever", -12.156503677368164 ], [ "▁Hop", -12.156535148620605 ], [ "▁Betrieb", -12.156816482543945 ], [ "gne", -12.15692138671875 ], [ "▁spe", -12.156975746154785 ], [ "▁Corner", -12.157078742980957 ], [ "▁devenir", -12.157118797302246 ], [ "ambiance", -12.157144546508789 ], [ "▁Graham", -12.157200813293457 ], [ "▁desires", -12.157289505004883 ], [ "▁Applications", -12.157291412353516 ], [ "▁genutzt", -12.157477378845215 ], [ "tek", -12.157612800598145 ], [ "▁Career", -12.157641410827637 ], [ "▁staple", -12.157695770263672 ], [ "▁Dodge", -12.157817840576172 ], [ "▁strictly", -12.157889366149902 ], [ "▁Gruppen", -12.157952308654785 ], [ "▁Finanz", -12.157981872558594 ], [ "▁sporting", -12.15809440612793 ], [ "▁Wieder", -12.158127784729004 ], [ "anny", -12.158208847045898 ], [ "▁bucura", -12.158233642578125 ], [ "▁Pest", -12.15824031829834 ], [ "▁circles", -12.158246994018555 ], [ "▁richtige", -12.158309936523438 ], [ "▁cycles", -12.158379554748535 ], [ "static", -12.15845012664795 ], [ "lasting", -12.15847396850586 ], [ "▁calcium", -12.158549308776855 ], [ "▁digest", -12.158697128295898 ], [ "Enfin", -12.158865928649902 ], [ "▁stressful", -12.158951759338379 ], [ "▁schemes", -12.158981323242188 ], [ "▁décision", -12.158987045288086 ], [ "▁comercial", -12.15907096862793 ], [ "işti", -12.159098625183105 ], [ "▁Comic", -12.15910816192627 ], [ "▁extensions", -12.159140586853027 ], [ "▁Sieg", -12.159168243408203 ], [ "▁pine", -12.15919017791748 ], [ "ieß", -12.159272193908691 ], [ "▁Images", -12.159427642822266 ], [ "▁Mensch", -12.159668922424316 ], [ "Pap", -12.159773826599121 ], [ "▁crops", -12.15994930267334 ], [ "▁sheep", -12.159996032714844 ], [ "▁istoric", -12.160001754760742 ], [ "▁Assessment", -12.160035133361816 ], [ "▁mounting", -12.16035270690918 ], [ "wirken", -12.160469055175781 ], [ "▁augment", -12.160469055175781 ], [ "▁picioare", -12.160542488098145 ], [ "organisme", -12.160590171813965 ], [ "▁Monitor", -12.16060733795166 ], [ "▁celles", -12.160642623901367 ], [ "▁Maison", -12.160709381103516 ], [ "notified", -12.160783767700195 ], [ "▁chew", -12.160831451416016 ], [ "▁bleu", -12.16083812713623 ], [ "dow", -12.160844802856445 
], [ "▁Grav", -12.16097354888916 ], [ "▁curtains", -12.160975456237793 ], [ "▁Campus", -12.161076545715332 ], [ "▁controversial", -12.161087036132812 ], [ "▁soutien", -12.161189079284668 ], [ "▁Dell", -12.1613187789917 ], [ "▁instrumental", -12.161431312561035 ], [ "▁Nan", -12.161514282226562 ], [ "▁prom", -12.161520957946777 ], [ "▁spatial", -12.161523818969727 ], [ "Similarly", -12.161558151245117 ], [ "▁Gala", -12.161601066589355 ], [ "ultimul", -12.16162109375 ], [ "▁Vom", -12.161761283874512 ], [ "▁Foot", -12.161784172058105 ], [ "bike", -12.1618013381958 ], [ "▁acids", -12.161979675292969 ], [ "entend", -12.162002563476562 ], [ "ivă", -12.162040710449219 ], [ "▁Weitere", -12.162124633789062 ], [ "▁vitamins", -12.162131309509277 ], [ "▁enhancement", -12.16234016418457 ], [ "▁Cruise", -12.162367820739746 ], [ "assemble", -12.162385940551758 ], [ "▁spécifique", -12.162459373474121 ], [ "affaires", -12.16261100769043 ], [ "▁indispensable", -12.1626558303833 ], [ "▁logistics", -12.16283130645752 ], [ "▁manche", -12.162919044494629 ], [ "▁dealt", -12.16297435760498 ], [ "▁favorable", -12.163036346435547 ], [ "▁unwanted", -12.163047790527344 ], [ "▁handmade", -12.163065910339355 ], [ "▁Regi", -12.163102149963379 ], [ "safe", -12.163134574890137 ], [ "persoanele", -12.163202285766602 ], [ "▁destinat", -12.163252830505371 ], [ "▁Maxi", -12.163299560546875 ], [ "▁salmon", -12.163454055786133 ], [ "wag", -12.163578033447266 ], [ "210", -12.163769721984863 ], [ "▁warned", -12.163865089416504 ], [ "läuft", -12.16386604309082 ], [ "agging", -12.163931846618652 ], [ "▁responsabil", -12.16398811340332 ], [ "▁presse", -12.164271354675293 ], [ "▁amis", -12.164305686950684 ], [ "▁rolls", -12.164377212524414 ], [ "control", -12.164405822753906 ], [ "▁Manufacturer", -12.164422988891602 ], [ "hnen", -12.164449691772461 ], [ "▁buget", -12.164546012878418 ], [ "OW", -12.16467571258545 ], [ "etro", -12.164745330810547 ], [ "▁communauté", -12.164837837219238 ], [ "unci", -12.164944648742676 ], [ "▁Chine", -12.164952278137207 ], [ "combines", -12.16501235961914 ], [ "▁learners", -12.165046691894531 ], [ "STE", -12.165055274963379 ], [ "ckel", -12.16511344909668 ], [ "Service", -12.165169715881348 ], [ "▁veröffentlicht", -12.165209770202637 ], [ "besides", -12.165266036987305 ], [ "getragen", -12.165349960327148 ], [ "▁opponent", -12.165521621704102 ], [ "▁volum", -12.165533065795898 ], [ "▁confusing", -12.165802001953125 ], [ "invasive", -12.165813446044922 ], [ "▁conseils", -12.165881156921387 ], [ "▁vibe", -12.165928840637207 ], [ "View", -12.166062355041504 ], [ "oară", -12.166086196899414 ], [ "Link", -12.166261672973633 ], [ "▁holy", -12.166261672973633 ], [ "▁crema", -12.16629409790039 ], [ "▁Michelle", -12.166303634643555 ], [ "▁Wien", -12.166383743286133 ], [ "▁undertake", -12.166404724121094 ], [ "▁Photograph", -12.166421890258789 ], [ "humain", -12.16645336151123 ], [ "▁Hang", -12.166545867919922 ], [ "designed", -12.16657829284668 ], [ "▁analyses", -12.166614532470703 ], [ "▁compose", -12.166653633117676 ], [ "▁substantially", -12.166765213012695 ], [ "▁marking", -12.166772842407227 ], [ "▁campagne", -12.166826248168945 ], [ "▁$15", -12.166828155517578 ], [ "pharma", -12.166972160339355 ], [ "▁playoff", -12.1669921875 ], [ "▁momentum", -12.167091369628906 ], [ "Temp", -12.16714096069336 ], [ "▁vinegar", -12.167143821716309 ], [ "▁descriptions", -12.167581558227539 ], [ "christ", -12.167656898498535 ], [ "wore", -12.16773509979248 ], [ "ITY", -12.167768478393555 ], [ "stehen", -12.167771339416504 ], 
[ "▁insulation", -12.1677827835083 ], [ "grav", -12.167842864990234 ], [ "2.2", -12.167887687683105 ], [ "▁Explore", -12.168028831481934 ], [ "▁dye", -12.168127059936523 ], [ "stair", -12.168155670166016 ], [ "artisan", -12.168207168579102 ], [ "▁zoom", -12.168285369873047 ], [ "▁turkey", -12.168573379516602 ], [ "▁locksmith", -12.168577194213867 ], [ "▁sewing", -12.168610572814941 ], [ "▁modeling", -12.168627738952637 ], [ "lied", -12.16870403289795 ], [ "adel", -12.168773651123047 ], [ "▁Going", -12.168785095214844 ], [ "WH", -12.168798446655273 ], [ "▁deserves", -12.168919563293457 ], [ "▁arriving", -12.168960571289062 ], [ "OFF", -12.169039726257324 ], [ "torului", -12.169109344482422 ], [ "ucked", -12.16921615600586 ], [ "▁approached", -12.169351577758789 ], [ "▁élevé", -12.169354438781738 ], [ "▁quotidien", -12.169416427612305 ], [ "▁derzeit", -12.16942024230957 ], [ "nutzt", -12.169656753540039 ], [ "science", -12.169729232788086 ], [ "▁Emma", -12.169841766357422 ], [ "▁builds", -12.169879913330078 ], [ "▁Logo", -12.169949531555176 ], [ "▁clouds", -12.170061111450195 ], [ "inflammatory", -12.170141220092773 ], [ "țiuni", -12.170199394226074 ], [ "▁Cisco", -12.17025089263916 ], [ "▁würden", -12.170254707336426 ], [ "▁Shaw", -12.170256614685059 ], [ "▁Ell", -12.170266151428223 ], [ "avance", -12.1703519821167 ], [ "anglais", -12.170365333557129 ], [ "weil", -12.170368194580078 ], [ "▁singura", -12.170464515686035 ], [ "ACK", -12.170489311218262 ], [ "likewise", -12.170522689819336 ], [ "ographie", -12.170646667480469 ], [ "liegen", -12.17088508605957 ], [ "▁Crow", -12.170964241027832 ], [ "▁unic", -12.171187400817871 ], [ "▁Ale", -12.171241760253906 ], [ "▁păstr", -12.17125129699707 ], [ "▁informal", -12.171337127685547 ], [ "650", -12.17136287689209 ], [ "Benz", -12.171489715576172 ], [ "▁antenna", -12.171540260314941 ], [ "▁pagini", -12.171552658081055 ], [ "▁lansat", -12.171561241149902 ], [ "▁Fans", -12.171576499938965 ], [ "taine", -12.171822547912598 ], [ "JO", -12.171853065490723 ], [ "▁Tips", -12.172091484069824 ], [ "cir", -12.172130584716797 ], [ "nou", -12.172384262084961 ], [ "▁planted", -12.17241382598877 ], [ "▁steering", -12.172423362731934 ], [ "▁Waren", -12.172475814819336 ], [ "▁clearance", -12.172515869140625 ], [ "▁Moscow", -12.172516822814941 ], [ "▁Faith", -12.172534942626953 ], [ "▁Pizza", -12.172572135925293 ], [ "▁Tank", -12.17273998260498 ], [ "QUE", -12.172783851623535 ], [ "▁studii", -12.172804832458496 ], [ "éné", -12.172829627990723 ], [ "▁guerre", -12.1728515625 ], [ "▁celebr", -12.173083305358887 ], [ "▁Factory", -12.173111915588379 ], [ "▁Browse", -12.173198699951172 ], [ "▁Request", -12.17323112487793 ], [ "▁taxpayer", -12.173311233520508 ], [ "▁assert", -12.173562049865723 ], [ "unternehmen", -12.173588752746582 ], [ "▁Ergebnis", -12.173687934875488 ], [ "▁Antwort", -12.173727035522461 ], [ "▁Photography", -12.173808097839355 ], [ "▁plă", -12.173866271972656 ], [ "IME", -12.173982620239258 ], [ "▁prochaine", -12.174074172973633 ], [ "ajouter", -12.174103736877441 ], [ "▁buffet", -12.174227714538574 ], [ "▁pixels", -12.174239158630371 ], [ "▁pledge", -12.174250602722168 ], [ "▁Inhalt", -12.17435359954834 ], [ "▁chase", -12.174384117126465 ], [ "Flow", -12.174493789672852 ], [ "▁melodi", -12.174872398376465 ], [ "▁Abu", -12.174991607666016 ], [ "▁1979", -12.175042152404785 ], [ "▁Photos", -12.175042152404785 ], [ "▁qualifications", -12.175148963928223 ], [ "▁zis", -12.175213813781738 ], [ "IAL", -12.175354957580566 ], [ "▁lender", -12.175390243530273 
], [ "▁indiferent", -12.175494194030762 ], [ "▁behaviors", -12.175506591796875 ], [ "▁flowing", -12.175531387329102 ], [ "▁zweite", -12.1756010055542 ], [ "abl", -12.175765037536621 ], [ "Schw", -12.176004409790039 ], [ "opi", -12.176030158996582 ], [ "ggi", -12.176164627075195 ], [ "▁depart", -12.176314353942871 ], [ "▁garde", -12.17640209197998 ], [ "▁tuition", -12.176490783691406 ], [ "fälle", -12.17650032043457 ], [ "▁determina", -12.17652702331543 ], [ "▁spice", -12.176627159118652 ], [ "▁petites", -12.176777839660645 ], [ "kot", -12.176973342895508 ], [ "▁intersection", -12.177242279052734 ], [ "hak", -12.177248001098633 ], [ "▁autumn", -12.177284240722656 ], [ "▁verbunden", -12.177284240722656 ], [ "▁ferme", -12.177287101745605 ], [ "PN", -12.17733097076416 ], [ "▁insurer", -12.177390098571777 ], [ "arten", -12.177401542663574 ], [ "▁Turkish", -12.177715301513672 ], [ "▁shoulders", -12.177732467651367 ], [ "=>", -12.177742004394531 ], [ "▁Nike", -12.177760124206543 ], [ "uire", -12.177763938903809 ], [ "▁Chile", -12.177811622619629 ], [ "jon", -12.177842140197754 ], [ "▁fragrance", -12.177884101867676 ], [ "▁bean", -12.177908897399902 ], [ "ips", -12.178108215332031 ], [ "assuming", -12.178191184997559 ], [ "liens", -12.178215026855469 ], [ "tocmai", -12.178267478942871 ], [ "▁60%", -12.178301811218262 ], [ "ipped", -12.178384780883789 ], [ "DIS", -12.178473472595215 ], [ "▁predicted", -12.178537368774414 ], [ "▁Picture", -12.178555488586426 ], [ "Bahn", -12.178796768188477 ], [ "104", -12.178854942321777 ], [ "tended", -12.178958892822266 ], [ "▁approve", -12.179031372070312 ], [ "▁magasin", -12.17908000946045 ], [ "▁mindset", -12.179208755493164 ], [ "rase", -12.179363250732422 ], [ "grand", -12.179469108581543 ], [ "▁Principal", -12.17947769165039 ], [ "▁informații", -12.17959976196289 ], [ "▁legătur", -12.179628372192383 ], [ "▁Farb", -12.179692268371582 ], [ "▁Dieu", -12.179710388183594 ], [ "▁alliance", -12.180378913879395 ], [ "weiligen", -12.180397987365723 ], [ "▁Câ", -12.18048095703125 ], [ "▁counseling", -12.180521011352539 ], [ "▁traveled", -12.180533409118652 ], [ "▁translated", -12.180558204650879 ], [ "▁carne", -12.180679321289062 ], [ "aked", -12.180707931518555 ], [ "▁LCD", -12.180868148803711 ], [ "▁Folge", -12.180909156799316 ], [ "▁Erfahrungen", -12.18093204498291 ], [ "▁1981", -12.18106460571289 ], [ "▁răspuns", -12.181075096130371 ], [ "itori", -12.18117618560791 ], [ "▁elementary", -12.181200981140137 ], [ "▁vorbei", -12.18127727508545 ], [ "▁cargo", -12.181361198425293 ], [ "disciplinary", -12.18140983581543 ], [ "WR", -12.181492805480957 ], [ "▁counterpart", -12.18162727355957 ], [ "family", -12.181641578674316 ], [ "▁viață", -12.181644439697266 ], [ "▁Definition", -12.18167495727539 ], [ "▁Cow", -12.18171501159668 ], [ "fällig", -12.182003021240234 ], [ "▁Sicht", -12.182025909423828 ], [ "▁mum", -12.182145118713379 ], [ "▁Mediterranean", -12.182275772094727 ], [ "nev", -12.182278633117676 ], [ "bü", -12.182293891906738 ], [ "▁slave", -12.182293891906738 ], [ "schnitt", -12.18233871459961 ], [ "▁firme", -12.182430267333984 ], [ "▁spill", -12.182454109191895 ], [ "▁wages", -12.182592391967773 ], [ "▁refine", -12.182615280151367 ], [ "▁upgraded", -12.182632446289062 ], [ "▁gospel", -12.182698249816895 ], [ "▁quartier", -12.182744979858398 ], [ "▁ -12.182772636413574 ], [ "▁Situation", -12.18298625946045 ], [ "▁suggesting", -12.183075904846191 ], [ "▁acne", -12.183113098144531 ], [ "▁Murray", -12.183337211608887 ], [ "▁Ian", -12.183469772338867 ], [ "hören", 
-12.183489799499512 ], [ "bia", -12.183603286743164 ], [ "▁Bewegung", -12.183684349060059 ], [ "▁abzu", -12.18379020690918 ], [ "reveals", -12.183795928955078 ], [ "friend", -12.184025764465332 ], [ "▁Connecticut", -12.18407917022705 ], [ "▁Testament", -12.184151649475098 ], [ "▁Lit", -12.184199333190918 ], [ "▁Ship", -12.184209823608398 ], [ "▁minunat", -12.184344291687012 ], [ "▁Moving", -12.184346199035645 ], [ "▁Device", -12.184486389160156 ], [ "▁Bake", -12.18453598022461 ], [ "▁qualification", -12.184633255004883 ], [ "▁challenged", -12.184640884399414 ], [ "▁Hinweis", -12.184721946716309 ], [ "▁sechs", -12.184769630432129 ], [ "та", -12.184903144836426 ], [ "120", -12.184904098510742 ], [ "licht", -12.184940338134766 ], [ "▁supervision", -12.185022354125977 ], [ "▁milestone", -12.18503189086914 ], [ "zeig", -12.185050964355469 ], [ "▁emphasize", -12.185224533081055 ], [ "▁complain", -12.185232162475586 ], [ "sack", -12.185341835021973 ], [ "▁rebuild", -12.185445785522461 ], [ "projekt", -12.18548583984375 ], [ "▁saint", -12.185644149780273 ], [ "lette", -12.185752868652344 ], [ "rade", -12.18580150604248 ], [ "▁pacient", -12.185893058776855 ], [ "signed", -12.186169624328613 ], [ "▁mil", -12.186261177062988 ], [ "cali", -12.186266899108887 ], [ "▁brochure", -12.186487197875977 ], [ "▁Bulgaria", -12.186488151550293 ], [ "Har", -12.186623573303223 ], [ "DH", -12.186697006225586 ], [ "▁jumping", -12.186712265014648 ], [ "ären", -12.186732292175293 ], [ "▁tactics", -12.186911582946777 ], [ "▁soleil", -12.187030792236328 ], [ "lessness", -12.18705940246582 ], [ "steigen", -12.187085151672363 ], [ "▁Brief", -12.187117576599121 ], [ "▁Oz", -12.18718433380127 ], [ "credit", -12.187239646911621 ], [ "glass", -12.187241554260254 ], [ "▁Baltimore", -12.187292098999023 ], [ "varies", -12.187445640563965 ], [ "sourced", -12.187575340270996 ], [ "▁documented", -12.187604904174805 ], [ "▁devine", -12.187664985656738 ], [ "möglichst", -12.187732696533203 ], [ "▁früher", -12.187756538391113 ], [ "outefois", -12.18790054321289 ], [ "▁Engagement", -12.187934875488281 ], [ "▁anumit", -12.18806266784668 ], [ "▁1930", -12.188186645507812 ], [ "▁Aufgaben", -12.188214302062988 ], [ "▁lineup", -12.188227653503418 ], [ "▁Cad", -12.188349723815918 ], [ "améliorer", -12.188437461853027 ], [ "▁februarie", -12.188499450683594 ], [ "▁cancellation", -12.188529968261719 ], [ "▁locks", -12.188577651977539 ], [ "▁modèles", -12.188711166381836 ], [ "▁breakdown", -12.188748359680176 ], [ "Ticket", -12.188810348510742 ], [ "▁Chen", -12.188855171203613 ], [ "▁Competition", -12.188910484313965 ], [ "▁median", -12.18896770477295 ], [ "rische", -12.189159393310547 ], [ "▁multipli", -12.189269065856934 ], [ "▁Belgium", -12.189305305480957 ], [ "▁Physical", -12.189308166503906 ], [ "▁parameter", -12.189432144165039 ], [ "▁carrot", -12.189435005187988 ], [ "▁mandat", -12.189617156982422 ], [ "▁towel", -12.189697265625 ], [ "▁insured", -12.189825057983398 ], [ "PRI", -12.189868927001953 ], [ "etter", -12.189915657043457 ], [ "▁Oder", -12.190083503723145 ], [ "argued", -12.190171241760254 ], [ "FB", -12.190196990966797 ], [ "versicherung", -12.190197944641113 ], [ "abila", -12.190251350402832 ], [ "▁Coin", -12.190324783325195 ], [ "around", -12.19050121307373 ], [ "▁Lorsqu", -12.190773963928223 ], [ "valent", -12.190918922424316 ], [ "▁weltweit", -12.19092082977295 ], [ "Mod", -12.191039085388184 ], [ "▁defect", -12.191044807434082 ], [ "ibly", -12.191136360168457 ], [ "▁Juan", -12.191153526306152 ], [ "▁Jur", 
-12.191171646118164 ], [ "large", -12.191307067871094 ], [ "▁indicators", -12.191461563110352 ], [ "invest", -12.19168472290039 ], [ "▁rehabilitation", -12.191705703735352 ], [ "nag", -12.191823959350586 ], [ "▁Grundlage", -12.191829681396484 ], [ "▁Strategy", -12.192131042480469 ], [ "▁supérieur", -12.192173957824707 ], [ "▁orbit", -12.192281723022461 ], [ "▁Auftrag", -12.192360877990723 ], [ "▁Verb", -12.192441940307617 ], [ "ANA", -12.19256591796875 ], [ "▁trimis", -12.192611694335938 ], [ "▁Rub", -12.192704200744629 ], [ "institu", -12.192732810974121 ], [ "▁inspect", -12.1927490234375 ], [ "▁Princess", -12.192757606506348 ], [ "especially", -12.192777633666992 ], [ "▁combinations", -12.192793846130371 ], [ "▁gaze", -12.192842483520508 ], [ "elemente", -12.192970275878906 ], [ "deal", -12.192980766296387 ], [ "polis", -12.193157196044922 ], [ "shaw", -12.193168640136719 ], [ "▁Republicans", -12.193203926086426 ], [ "aded", -12.193244934082031 ], [ "▁Louisiana", -12.193364143371582 ], [ "▁Ville", -12.193368911743164 ], [ "▁afterwards", -12.193389892578125 ], [ "ONG", -12.193608283996582 ], [ "▁dryer", -12.193636894226074 ], [ "▁Manhattan", -12.19374942779541 ], [ "▁recomanda", -12.19412612915039 ], [ "▁juca", -12.194253921508789 ], [ "▁Crown", -12.194260597229004 ], [ "▁flesh", -12.194347381591797 ], [ "sichtig", -12.194358825683594 ], [ "▁rempli", -12.19437026977539 ], [ "▁deposits", -12.19438362121582 ], [ "▁Voll", -12.194599151611328 ], [ "▁analysts", -12.194672584533691 ], [ "▁Krieg", -12.19484806060791 ], [ "▁Rosa", -12.19495964050293 ], [ "▁Supply", -12.194964408874512 ], [ "GF", -12.19497013092041 ], [ "idad", -12.195098876953125 ], [ "▁flush", -12.195103645324707 ], [ "▁circular", -12.195355415344238 ], [ "▁național", -12.195379257202148 ], [ "▁lorsqu", -12.195441246032715 ], [ "▁analyst", -12.195459365844727 ], [ "▁Jahrhundert", -12.195586204528809 ], [ "▁biology", -12.195713996887207 ], [ "copy", -12.195733070373535 ], [ "▁bringt", -12.195765495300293 ], [ "▁Gospel", -12.195780754089355 ], [ "▁sorgen", -12.195842742919922 ], [ "zeichnung", -12.196181297302246 ], [ "chair", -12.196197509765625 ], [ "EB", -12.19636344909668 ], [ "▁Beth", -12.1964111328125 ], [ "115", -12.196416854858398 ], [ "▁Neue", -12.196479797363281 ], [ "▁faible", -12.196599960327148 ], [ "▁methodology", -12.196603775024414 ], [ "spiele", -12.196647644042969 ], [ "▁cherry", -12.196727752685547 ], [ "▁Mak", -12.196802139282227 ], [ "▁volet", -12.196982383728027 ], [ "funk", -12.197196006774902 ], [ "▁aktuelle", -12.197372436523438 ], [ "▁Yahoo", -12.197408676147461 ], [ "▁Zusammenarbeit", -12.197669982910156 ], [ "▁Serve", -12.197754859924316 ], [ "▁simpler", -12.197978019714355 ], [ "intégr", -12.197990417480469 ], [ "ndlich", -12.198083877563477 ], [ "▁actress", -12.198320388793945 ], [ "▁reuse", -12.198332786560059 ], [ "▁reviewing", -12.198405265808105 ], [ "statt", -12.198457717895508 ], [ "▁diving", -12.198469161987305 ], [ "▁Național", -12.198677062988281 ], [ "voi", -12.19873332977295 ], [ "Disc", -12.198812484741211 ], [ "▁Mineral", -12.19886302947998 ], [ "▁emit", -12.199007034301758 ], [ "witz", -12.199078559875488 ], [ "▁forgot", -12.19909954071045 ], [ "▁dim", -12.199115753173828 ], [ "upper", -12.19947624206543 ], [ "sichtlich", -12.19949722290039 ], [ "▁parcours", -12.199670791625977 ], [ "8:00", -12.199697494506836 ], [ "▁keyword", -12.199701309204102 ], [ "▁upgrades", -12.199763298034668 ], [ "kunden", -12.200177192687988 ], [ "▁Seg", -12.200257301330566 ], [ "▁Circle", -12.200289726257324 
], [ "▁ginger", -12.200336456298828 ], [ "mment", -12.200516700744629 ], [ "▁expenditure", -12.200655937194824 ], [ "▁parle", -12.200693130493164 ], [ "▁Counsel", -12.200722694396973 ], [ "▁Gui", -12.200722694396973 ], [ "resident", -12.20103645324707 ], [ "▁benchmark", -12.20103931427002 ], [ "▁Elektro", -12.201064109802246 ], [ "▁réalité", -12.201064109802246 ], [ "▁ridiculous", -12.201067924499512 ], [ "▁necklace", -12.20108699798584 ], [ "nian", -12.201117515563965 ], [ "▁Move", -12.20113468170166 ], [ "▁elevated", -12.201204299926758 ], [ "WE", -12.201281547546387 ], [ "▁Drum", -12.20132064819336 ], [ "▁Delivery", -12.201350212097168 ], [ "indicating", -12.201452255249023 ], [ "▁Benjamin", -12.201472282409668 ], [ "▁Samuel", -12.2014741897583 ], [ "bene", -12.201666831970215 ], [ "▁experienta", -12.201676368713379 ], [ "▁rocket", -12.201839447021484 ], [ "▁fossil", -12.201883316040039 ], [ "▁festive", -12.20193099975586 ], [ "▁conscience", -12.201964378356934 ], [ "▁bacon", -12.202136993408203 ], [ "▁aero", -12.202159881591797 ], [ "public", -12.202187538146973 ], [ "▁zic", -12.202218055725098 ], [ "ombre", -12.202356338500977 ], [ "▁Drain", -12.202550888061523 ], [ "7.5", -12.202672004699707 ], [ "▁Deutschen", -12.202703475952148 ], [ "reportedly", -12.202754974365234 ], [ "▁Français", -12.203105926513672 ], [ "▁enzyme", -12.203106880187988 ], [ "▁inquiry", -12.203117370605469 ], [ "▁presque", -12.203193664550781 ], [ "▁Airlines", -12.203228950500488 ], [ "▁Salon", -12.203237533569336 ], [ "▁Volunteer", -12.203310012817383 ], [ "▁modular", -12.203349113464355 ], [ "ón", -12.203364372253418 ], [ "NH", -12.203449249267578 ], [ "▁souhaite", -12.203516960144043 ], [ "social", -12.203659057617188 ], [ "▁Include", -12.203729629516602 ], [ "▁Decor", -12.2037992477417 ], [ "dded", -12.203965187072754 ], [ "▁Außen", -12.203969955444336 ], [ "rendu", -12.20412540435791 ], [ "▁MBA", -12.204150199890137 ], [ "▁columns", -12.204155921936035 ], [ "▁Wing", -12.204436302185059 ], [ "▁landmark", -12.204442977905273 ], [ "schritt", -12.204594612121582 ], [ "▁désir", -12.204630851745605 ], [ "(5)", -12.204680442810059 ], [ "▁réseaux", -12.204693794250488 ], [ "income", -12.204710960388184 ], [ "▁revised", -12.204819679260254 ], [ "HY", -12.204863548278809 ], [ "▁Explorer", -12.204873085021973 ], [ "▁Lam", -12.204877853393555 ], [ "▁almond", -12.204910278320312 ], [ "▁faux", -12.204910278320312 ], [ "opt", -12.204923629760742 ], [ "Out", -12.204939842224121 ], [ "▁virtue", -12.205025672912598 ], [ "▁Chocolate", -12.205151557922363 ], [ "▁spannend", -12.205305099487305 ], [ "▁spices", -12.205327033996582 ], [ "▁Climate", -12.205560684204102 ], [ "▁Residential", -12.205560684204102 ], [ "gung", -12.205700874328613 ], [ "▁filtr", -12.20606803894043 ], [ "circ", -12.206123352050781 ], [ "sisted", -12.206172943115234 ], [ "▁dedicat", -12.206243515014648 ], [ "▁foil", -12.206387519836426 ], [ "▁uita", -12.206392288208008 ], [ "▁lié", -12.206402778625488 ], [ "▁Demo", -12.206409454345703 ], [ "▁spoil", -12.2064208984375 ], [ "Cu", -12.206448554992676 ], [ "naut", -12.206525802612305 ], [ "▁configured", -12.206535339355469 ], [ "UK", -12.206543922424316 ], [ "▁disagree", -12.20656967163086 ], [ "Medic", -12.206767082214355 ], [ "cosm", -12.207074165344238 ], [ "Toute", -12.207109451293945 ], [ "▁beneficia", -12.207170486450195 ], [ "fassen", -12.207327842712402 ], [ "▁bail", -12.207337379455566 ], [ "igue", -12.207439422607422 ], [ "▁Mă", -12.20744800567627 ], [ "▁strips", -12.20748519897461 ], [ "▁Dritte", 
-12.207537651062012 ], [ "▁putere", -12.207597732543945 ], [ "Play", -12.20763111114502 ], [ "▁Samstag", -12.207632064819336 ], [ "▁households", -12.207791328430176 ], [ "▁persistent", -12.207914352416992 ], [ "uben", -12.207942962646484 ], [ "Web", -12.20809555053711 ], [ "▁scenery", -12.20820140838623 ], [ "▁défini", -12.208257675170898 ], [ "news", -12.208337783813477 ], [ "eira", -12.208428382873535 ], [ "▁Mumbai", -12.208438873291016 ], [ "▁Ward", -12.208558082580566 ], [ "▁ladder", -12.2086181640625 ], [ "▁plaque", -12.208623886108398 ], [ "nés", -12.208639144897461 ], [ "▁condamn", -12.20864486694336 ], [ "▁attribute", -12.208687782287598 ], [ "atti", -12.20873737335205 ], [ "▁Emily", -12.208953857421875 ], [ "▁pleine", -12.20896053314209 ], [ "▁automatisch", -12.209004402160645 ], [ "ifies", -12.209052085876465 ], [ "onna", -12.209104537963867 ], [ "▁inject", -12.209157943725586 ], [ "▁evolve", -12.209297180175781 ], [ "▁breeze", -12.209299087524414 ], [ "▁montre", -12.209415435791016 ], [ "▁memorial", -12.209425926208496 ], [ "ämlich", -12.209465026855469 ], [ "NBC", -12.209589958190918 ], [ "▁1940", -12.209836959838867 ], [ "▁trouvé", -12.209892272949219 ], [ "when", -12.209914207458496 ], [ "▁Büro", -12.209959983825684 ], [ "▁probability", -12.209978103637695 ], [ "cute", -12.21006965637207 ], [ "▁sturdy", -12.210078239440918 ], [ "AMP", -12.210165023803711 ], [ "▁Constantin", -12.210283279418945 ], [ "▁batter", -12.21037483215332 ], [ "▁bist", -12.210470199584961 ], [ "▁streams", -12.210528373718262 ], [ "rushing", -12.21057415008545 ], [ "▁shaft", -12.21065902709961 ], [ "▁proprii", -12.210722923278809 ], [ "émi", -12.21074390411377 ], [ "online", -12.210817337036133 ], [ "▁vanity", -12.210870742797852 ], [ "▁mural", -12.210878372192383 ], [ "▁distinguish", -12.210905075073242 ], [ "▁niciun", -12.211191177368164 ], [ "▁européenne", -12.211252212524414 ], [ "▁secretary", -12.211289405822754 ], [ "▁gaps", -12.211492538452148 ], [ "▁realm", -12.211499214172363 ], [ "▁elastic", -12.211504936218262 ], [ "▁Avoid", -12.211519241333008 ], [ "▁mauvais", -12.211931228637695 ], [ "▁innovations", -12.212663650512695 ], [ "▁suprem", -12.212776184082031 ], [ "▁vederea", -12.212817192077637 ], [ "wenden", -12.212892532348633 ], [ "-40", -12.213075637817383 ], [ "prenant", -12.213155746459961 ], [ "utilisateur", -12.213210105895996 ], [ "▁Oliver", -12.213228225708008 ], [ "111", -12.21326732635498 ], [ "▁manifestation", -12.213382720947266 ], [ "▁Rachel", -12.213458061218262 ], [ "agog", -12.21348762512207 ], [ "▁seamless", -12.213534355163574 ], [ "▁Employee", -12.213576316833496 ], [ "▁dimanche", -12.213582038879395 ], [ "▁banii", -12.213631629943848 ], [ "▁Ruth", -12.213781356811523 ], [ "▁Roy", -12.21385383605957 ], [ "▁homeless", -12.2139253616333 ], [ "▁Lower", -12.213932037353516 ], [ "health", -12.21393871307373 ], [ "▁atenti", -12.2140474319458 ], [ "▁touched", -12.214183807373047 ], [ "May", -12.214195251464844 ], [ "▁Buc", -12.214225769042969 ], [ "▁explored", -12.214393615722656 ], [ "▁declare", -12.214461326599121 ], [ "▁garment", -12.214469909667969 ], [ "▁buzz", -12.214483261108398 ], [ "▁rappel", -12.214662551879883 ], [ "▁uscat", -12.214903831481934 ], [ "▁Hyper", -12.214914321899414 ], [ "Etat", -12.215007781982422 ], [ "▁Titel", -12.215035438537598 ], [ "product", -12.215191841125488 ], [ "woman", -12.215280532836914 ], [ "▁Gab", -12.215450286865234 ], [ "▁advances", -12.215615272521973 ], [ "2/", -12.215753555297852 ], [ "prone", -12.215770721435547 ], [ "kö", 
-12.215986251831055 ], [ "▁counting", -12.21599292755127 ], [ "Sollte", -12.216043472290039 ], [ "▁Konzept", -12.216063499450684 ], [ "▁backgrounds", -12.216153144836426 ], [ "jährige", -12.216154098510742 ], [ "▁Alltag", -12.216187477111816 ], [ "▁metrics", -12.21619701385498 ], [ "▁illustrated", -12.216222763061523 ], [ "▁Charge", -12.21631908416748 ], [ "▁thoughtful", -12.216423034667969 ], [ "gesetz", -12.216527938842773 ], [ "pfen", -12.216611862182617 ], [ "▁déroul", -12.216713905334473 ], [ "▁checkout", -12.216876029968262 ], [ "quette", -12.216936111450195 ], [ "▁pierdut", -12.2170991897583 ], [ "▁Seat", -12.217140197753906 ], [ "▁linen", -12.217193603515625 ], [ "archiv", -12.217245101928711 ], [ "arna", -12.217254638671875 ], [ "importe", -12.21742057800293 ], [ "▁PHP", -12.217496871948242 ], [ "▁Parents", -12.217503547668457 ], [ "▁Birmingham", -12.217513084411621 ], [ "▁Integr", -12.217588424682617 ], [ "▁Mason", -12.217607498168945 ], [ "zieht", -12.217781066894531 ], [ "▁camps", -12.217803001403809 ], [ "OG", -12.21786117553711 ], [ "▁syrup", -12.217927932739258 ], [ "▁Cookies", -12.217928886413574 ], [ "▁Comfort", -12.217955589294434 ], [ "ută", -12.217976570129395 ], [ "abia", -12.217979431152344 ], [ "zeci", -12.218003273010254 ], [ "▁Gardens", -12.218009948730469 ], [ "▁incidents", -12.218149185180664 ], [ "▁participat", -12.218235969543457 ], [ "▁glimpse", -12.218342781066895 ], [ "5.5", -12.218437194824219 ], [ "▁dealers", -12.218469619750977 ], [ "▁Grande", -12.218565940856934 ], [ "▁raid", -12.218944549560547 ], [ "owing", -12.21903133392334 ], [ "▁contrary", -12.219109535217285 ], [ "Earlier", -12.219138145446777 ], [ "tien", -12.21916389465332 ], [ "drop", -12.219169616699219 ], [ "▁angajat", -12.219359397888184 ], [ "▁procesul", -12.219515800476074 ], [ "▁focal", -12.219564437866211 ], [ "▁impart", -12.219703674316406 ], [ "▁Abschluss", -12.219749450683594 ], [ "carui", -12.219830513000488 ], [ "insul", -12.220277786254883 ], [ "▁creamy", -12.220283508300781 ], [ "eille", -12.22032356262207 ], [ "suppl", -12.220335960388184 ], [ "▁Heaven", -12.220471382141113 ], [ "éna", -12.220667839050293 ], [ "▁swap", -12.220739364624023 ], [ "▁vreau", -12.220762252807617 ], [ "▁Bryan", -12.220809936523438 ], [ "▁Zug", -12.220815658569336 ], [ "▁glance", -12.220848083496094 ], [ "▁elimin", -12.220900535583496 ], [ "▁yeux", -12.221084594726562 ], [ "wehr", -12.221238136291504 ], [ "2.5", -12.221287727355957 ], [ "▁poses", -12.221364974975586 ], [ "▁parcel", -12.221585273742676 ], [ "▁Apartment", -12.221749305725098 ], [ "▁NASA", -12.221768379211426 ], [ "▁bénéfici", -12.22187614440918 ], [ "▁Umgebung", -12.221890449523926 ], [ "asia", -12.221946716308594 ], [ "abi", -12.221967697143555 ], [ "coup", -12.222002983093262 ], [ "synchron", -12.222017288208008 ], [ "▁Sicherheits", -12.222029685974121 ], [ "bic", -12.222076416015625 ], [ "▁distract", -12.222148895263672 ], [ "▁rentals", -12.222163200378418 ], [ "constru", -12.222290992736816 ], [ "curs", -12.222345352172852 ], [ "genannten", -12.222386360168457 ], [ "▁Shanghai", -12.222501754760742 ], [ "▁vague", -12.222504615783691 ], [ "▁Leather", -12.22250747680664 ], [ "▁Vintage", -12.222532272338867 ], [ "pointing", -12.22259521484375 ], [ "avant", -12.22268295288086 ], [ "gues", -12.222949028015137 ], [ "sweise", -12.22302532196045 ], [ "▁Greater", -12.223065376281738 ], [ "fig", -12.22310733795166 ], [ "▁Blut", -12.223217964172363 ], [ "▁Stellen", -12.22326946258545 ], [ "▁isolation", -12.22337818145752 ], [ "▁overhead", 
-12.22338581085205 ], [ "▁wondered", -12.223508834838867 ], [ "essai", -12.223609924316406 ], [ "aves", -12.2236328125 ], [ "▁Shore", -12.223637580871582 ], [ "▁INC", -12.223709106445312 ], [ "rufen", -12.223980903625488 ], [ "▁magnifique", -12.224069595336914 ], [ "▁intéressant", -12.224072456359863 ], [ "▁tanks", -12.224075317382812 ], [ "▁Tun", -12.224367141723633 ], [ "▁approaching", -12.224390029907227 ], [ "▁relay", -12.224479675292969 ], [ "▁Küche", -12.224529266357422 ], [ "describing", -12.224587440490723 ], [ "▁Certification", -12.224588394165039 ], [ "▁Breakfast", -12.224597930908203 ], [ "▁Frame", -12.224891662597656 ], [ "▁Stoff", -12.224909782409668 ], [ "▁victime", -12.224924087524414 ], [ "Observ", -12.224943161010742 ], [ "▁gutter", -12.224989891052246 ], [ "standard", -12.225220680236816 ], [ "▁Sci", -12.225244522094727 ], [ "▁sept", -12.225377082824707 ], [ "▁Potter", -12.225423812866211 ], [ "letter", -12.22577953338623 ], [ "▁tobacco", -12.225852012634277 ], [ "▁threatened", -12.22591781616211 ], [ "MW", -12.225936889648438 ], [ "▁Cher", -12.225944519042969 ], [ "0.1", -12.225957870483398 ], [ "mitted", -12.22596263885498 ], [ "zustellen", -12.225967407226562 ], [ "dominated", -12.226165771484375 ], [ "/16", -12.22623348236084 ], [ "POS", -12.226317405700684 ], [ "▁Zin", -12.226373672485352 ], [ "▁Okay", -12.226381301879883 ], [ "▁projected", -12.226405143737793 ], [ "▁selber", -12.226548194885254 ], [ "▁proiectului", -12.2266206741333 ], [ "▁Shell", -12.226683616638184 ], [ "▁cartridge", -12.226706504821777 ], [ "Message", -12.2267484664917 ], [ "haben", -12.226799964904785 ], [ "▁slides", -12.226829528808594 ], [ "▁gleichzeitig", -12.226886749267578 ], [ "▁Racing", -12.227051734924316 ], [ "▁20,", -12.227070808410645 ], [ "▁separat", -12.227094650268555 ], [ "▁repeatedly", -12.227110862731934 ], [ "▁casting", -12.22728157043457 ], [ "▁sacred", -12.227283477783203 ], [ "verfahren", -12.227387428283691 ], [ "▁echilibr", -12.227514266967773 ], [ "▁rebel", -12.2277250289917 ], [ "säu", -12.227794647216797 ], [ "ummy", -12.227815628051758 ], [ "▁backing", -12.227889060974121 ], [ "▁sponsors", -12.227912902832031 ], [ "▁Stress", -12.22802448272705 ], [ "▁Rules", -12.228083610534668 ], [ "▁render", -12.228241920471191 ], [ "▁funktioniert", -12.228384971618652 ], [ "▁Pearl", -12.228472709655762 ], [ "▁Scho", -12.228527069091797 ], [ "schwer", -12.228595733642578 ], [ "▁descoperit", -12.228702545166016 ], [ "holen", -12.228720664978027 ], [ "imposed", -12.228960990905762 ], [ "▁appearing", -12.228968620300293 ], [ "▁höher", -12.229082107543945 ], [ "▁Victorian", -12.229111671447754 ], [ "▁founding", -12.229155540466309 ], [ "▁Polish", -12.229239463806152 ], [ "▁anume", -12.229248046875 ], [ "Box", -12.229488372802734 ], [ "▁intrat", -12.229598999023438 ], [ "▁Inspiration", -12.229610443115234 ], [ "▁Canyon", -12.229625701904297 ], [ "▁Franklin", -12.22974681854248 ], [ "▁susceptible", -12.22982120513916 ], [ "trap", -12.229839324951172 ], [ "▁Roma", -12.23000717163086 ], [ "▁ethics", -12.230009078979492 ], [ "▁Privat", -12.230027198791504 ], [ "▁journalists", -12.230090141296387 ], [ "▁Universität", -12.230246543884277 ], [ "▁conditioner", -12.230308532714844 ], [ "folge", -12.230327606201172 ], [ "kirche", -12.230416297912598 ], [ "gehalten", -12.230530738830566 ], [ "midi", -12.230570793151855 ], [ "▁radar", -12.230619430541992 ], [ "▁Yard", -12.230775833129883 ], [ "▁professionnelle", -12.230863571166992 ], [ "▁Orchestra", -12.230870246887207 ], [ "▁immigrants", 
-12.230870246887207 ], [ "▁refined", -12.230929374694824 ], [ "▁Bishop", -12.231036186218262 ], [ "string", -12.231095314025879 ], [ "▁majoritatea", -12.231231689453125 ], [ "▁workflow", -12.23123836517334 ], [ "▁întreg", -12.231306076049805 ], [ "went", -12.231563568115234 ], [ "▁trat", -12.231689453125 ], [ "felul", -12.23176383972168 ], [ "▁hardwood", -12.231821060180664 ], [ "▁Task", -12.231867790222168 ], [ "branded", -12.231921195983887 ], [ "▁cinq", -12.231966018676758 ], [ "▁curb", -12.232041358947754 ], [ "▁Discount", -12.232043266296387 ], [ "▁Episode", -12.232131958007812 ], [ "▁Knowledge", -12.232144355773926 ], [ "▁tricky", -12.232173919677734 ], [ "▁characteristic", -12.232233047485352 ], [ "▁plata", -12.23226261138916 ], [ "▁Labour", -12.23232650756836 ], [ "▁Tha", -12.232372283935547 ], [ "▁Liefer", -12.232430458068848 ], [ "▁Reader", -12.232471466064453 ], [ "▁Linda", -12.232521057128906 ], [ "ittlerweile", -12.232552528381348 ], [ "defining", -12.232564926147461 ], [ "▁delayed", -12.232635498046875 ], [ "▁Bewertung", -12.232674598693848 ], [ "▁Unique", -12.232791900634766 ], [ "▁Champion", -12.232866287231445 ], [ "2008", -12.232897758483887 ], [ "▁conclu", -12.232934951782227 ], [ "▁câștig", -12.2329740524292 ], [ "▁scheduling", -12.2329740524292 ], [ "▁sailing", -12.233116149902344 ], [ "▁Storm", -12.23318862915039 ], [ "▁Stil", -12.23320198059082 ], [ "▁Album", -12.233211517333984 ], [ "▁ultime", -12.233343124389648 ], [ "url", -12.233369827270508 ], [ "▁terrific", -12.23339557647705 ], [ "▁remedy", -12.233396530151367 ], [ "▁Around", -12.233592987060547 ], [ "▁Kni", -12.233756065368652 ], [ "etty", -12.23376750946045 ], [ "Managing", -12.233809471130371 ], [ "▁Bedeutung", -12.233816146850586 ], [ "▁earthquake", -12.233817100524902 ], [ "▁Telefon", -12.233818054199219 ], [ "▁Upper", -12.233869552612305 ], [ "▁validation", -12.233892440795898 ], [ "-22", -12.233997344970703 ], [ "▁queue", -12.23401165008545 ], [ "tinde", -12.234025001525879 ], [ "built", -12.234047889709473 ], [ "▁voix", -12.234125137329102 ], [ "▁Resource", -12.234126091003418 ], [ "ţiuni", -12.234143257141113 ], [ "▁satisfying", -12.234299659729004 ], [ "▁Kohl", -12.234441757202148 ], [ "▁Materials", -12.234618186950684 ], [ "▁esp", -12.234732627868652 ], [ "enseignement", -12.234773635864258 ], [ "danach", -12.234883308410645 ], [ "peux", -12.234932899475098 ], [ "▁deployed", -12.235113143920898 ], [ "▁1976", -12.235126495361328 ], [ "ușor", -12.235334396362305 ], [ "élection", -12.235380172729492 ], [ "ettes", -12.235437393188477 ], [ "▁Madison", -12.235506057739258 ], [ "108", -12.235685348510742 ], [ "berger", -12.235696792602539 ], [ "▁pedal", -12.235702514648438 ], [ "▁quasi", -12.235820770263672 ], [ "▁lend", -12.235843658447266 ], [ "VER", -12.235940933227539 ], [ "▁chapters", -12.236002922058105 ], [ "▁idei", -12.23600959777832 ], [ "Deine", -12.236034393310547 ], [ "▁endure", -12.236092567443848 ], [ "▁Studios", -12.236259460449219 ], [ "structure", -12.236274719238281 ], [ "▁puiss", -12.236370086669922 ], [ "▁Morning", -12.236443519592285 ], [ "guide", -12.236462593078613 ], [ "▁Wave", -12.236617088317871 ], [ "▁banque", -12.236879348754883 ], [ "änd", -12.236912727355957 ], [ "oubli", -12.237070083618164 ], [ "▁mixer", -12.237125396728516 ], [ "▁remedi", -12.237210273742676 ], [ "▁scop", -12.237421989440918 ], [ "▁Rosen", -12.237561225891113 ], [ "▁spital", -12.23773193359375 ], [ "blau", -12.237811088562012 ], [ "▁financiar", -12.237865447998047 ], [ "avour", -12.237871170043945 ], [ 
"Def", -12.238025665283203 ], [ "▁socket", -12.238076210021973 ], [ "▁occurring", -12.238360404968262 ], [ "▁munci", -12.238368034362793 ], [ "▁realiza", -12.238426208496094 ], [ "▁beating", -12.2384614944458 ], [ "▁Phillip", -12.238490104675293 ], [ "▁courant", -12.238509178161621 ], [ "Auto", -12.238608360290527 ], [ "▁Lager", -12.238685607910156 ], [ "▁folos", -12.238696098327637 ], [ "▁moyens", -12.238770484924316 ], [ "▁Ec", -12.238780975341797 ], [ "▁Strip", -12.238788604736328 ], [ "sparen", -12.238848686218262 ], [ "▁Nintendo", -12.238886833190918 ], [ "▁Murphy", -12.238912582397461 ], [ "▁flux", -12.239034652709961 ], [ "▁mots", -12.239034652709961 ], [ "▁rechts", -12.239045143127441 ], [ "▁cardio", -12.239142417907715 ], [ "avoiding", -12.239343643188477 ], [ "érer", -12.239453315734863 ], [ "hiel", -12.239461898803711 ], [ "▁rezistent", -12.239521980285645 ], [ "close", -12.23954963684082 ], [ "hésitez", -12.239596366882324 ], [ "Hz", -12.239631652832031 ], [ "▁elaborate", -12.239689826965332 ], [ "▁permanently", -12.239709854125977 ], [ "▁Pittsburgh", -12.239734649658203 ], [ "▁counties", -12.239819526672363 ], [ "▁bookmark", -12.239919662475586 ], [ "▁Label", -12.239965438842773 ], [ "▁Freude", -12.239974021911621 ], [ "▁preferat", -12.239986419677734 ], [ "▁Mein", -12.239995002746582 ], [ "▁Crew", -12.240218162536621 ], [ "▁clips", -12.240253448486328 ], [ "8,000", -12.240263938903809 ], [ "▁recognise", -12.240311622619629 ], [ "ință", -12.240365028381348 ], [ "▁prieteni", -12.240447044372559 ], [ "Heute", -12.240522384643555 ], [ "ancienne", -12.240534782409668 ], [ "▁annoying", -12.240583419799805 ], [ "▁awful", -12.240704536437988 ], [ "▁Comments", -12.240774154663086 ], [ "▁musician", -12.240830421447754 ], [ "▁Elite", -12.241023063659668 ], [ "▁patri", -12.241024017333984 ], [ "▁Coupon", -12.241037368774414 ], [ "▁Farbe", -12.241097450256348 ], [ "▁contribui", -12.241110801696777 ], [ "hari", -12.241294860839844 ], [ "▁activitati", -12.24161148071289 ], [ "▁Traum", -12.2416410446167 ], [ "1.8", -12.24170207977295 ], [ "▁Healthcare", -12.24172306060791 ], [ "▁refresh", -12.241943359375 ], [ "▁Maha", -12.242060661315918 ], [ "▁dép", -12.242082595825195 ], [ "▁Studien", -12.242314338684082 ], [ "▁spectacol", -12.242378234863281 ], [ "impro", -12.24254035949707 ], [ "▁commentaire", -12.242544174194336 ], [ "ported", -12.242570877075195 ], [ "▁reclam", -12.242612838745117 ], [ "▁Verkauf", -12.242634773254395 ], [ "▁newspapers", -12.242661476135254 ], [ "▁iubit", -12.242838859558105 ], [ "▁Kenne", -12.242844581604004 ], [ "▁Consultant", -12.242958068847656 ], [ "▁stau", -12.242986679077148 ], [ "TON", -12.243057250976562 ], [ "▁Fehler", -12.243070602416992 ], [ "▁lettre", -12.243167877197266 ], [ "▁investigator", -12.243172645568848 ], [ "▁quantities", -12.243184089660645 ], [ "ogram", -12.243208885192871 ], [ "avaient", -12.24323844909668 ], [ "▁reducere", -12.243265151977539 ], [ "Lite", -12.243402481079102 ], [ "kurs", -12.243443489074707 ], [ "pré", -12.24383544921875 ], [ "pap", -12.243898391723633 ], [ "▁Männer", -12.243983268737793 ], [ "▁gauche", -12.244022369384766 ], [ "▁ähnlich", -12.244027137756348 ], [ "▁sunlight", -12.244063377380371 ], [ "▁rester", -12.24422550201416 ], [ "jumped", -12.244586944580078 ], [ "▁exclusiv", -12.24463176727295 ], [ "▁electoral", -12.244640350341797 ], [ "▁Portal", -12.244650840759277 ], [ "ulent", -12.244688987731934 ], [ "▁sonst", -12.24474048614502 ], [ "entraîne", -12.24483585357666 ], [ "▁repas", -12.244837760925293 ], [ "▁redus", 
-12.244858741760254 ], [ "aku", -12.244866371154785 ], [ "▁Graphic", -12.245251655578613 ], [ "▁geringe", -12.24539566040039 ], [ "plätze", -12.245474815368652 ], [ "Trebuie", -12.245479583740234 ], [ "▁rezultate", -12.245479583740234 ], [ "▁configure", -12.245683670043945 ], [ "▁PV", -12.245834350585938 ], [ "▁insect", -12.246109962463379 ], [ "▁Reviews", -12.246129035949707 ], [ "releasing", -12.246186256408691 ], [ "▁appliance", -12.246246337890625 ], [ "▁oferte", -12.246482849121094 ], [ "▁WILL", -12.246484756469727 ], [ "rion", -12.246499061584473 ], [ "▁Cole", -12.246582984924316 ], [ "▁1975", -12.246650695800781 ], [ "Admin", -12.24677848815918 ], [ "▁parade", -12.246800422668457 ], [ "▁mélange", -12.24692153930664 ], [ "▁shortage", -12.247007369995117 ], [ "▁Measure", -12.247400283813477 ], [ "anchmal", -12.24742603302002 ], [ "▁transfers", -12.247432708740234 ], [ "▁sistemului", -12.247573852539062 ], [ "▁deschide", -12.247819900512695 ], [ "▁Künstler", -12.247821807861328 ], [ "▁Plain", -12.247848510742188 ], [ "▁messaging", -12.247855186462402 ], [ "▁metabolism", -12.247879981994629 ], [ "fill", -12.248031616210938 ], [ "▁Bomb", -12.24814224243164 ], [ "usine", -12.248208045959473 ], [ "▁restart", -12.248233795166016 ], [ "▁Discussion", -12.248336791992188 ], [ "smith", -12.248472213745117 ], [ "▁Bh", -12.248607635498047 ], [ "▁sap", -12.248689651489258 ], [ "Moo", -12.248714447021484 ], [ "▁indirect", -12.248785972595215 ], [ "▁eingesetzt", -12.248863220214844 ], [ "▁Hip", -12.248870849609375 ], [ "▁iulie", -12.249113082885742 ], [ "▁atac", -12.249201774597168 ], [ "▁passport", -12.2492036819458 ], [ "▁Egyptian", -12.249290466308594 ], [ "▁soluți", -12.249349594116211 ], [ "▁cakes", -12.249356269836426 ], [ "▁Fellow", -12.24949836730957 ], [ "▁collision", -12.249533653259277 ], [ "▁abundant", -12.249961853027344 ], [ "▁Wonder", -12.24997329711914 ], [ "▁theories", -12.249991416931152 ], [ "landed", -12.250046730041504 ], [ "▁meantime", -12.2500638961792 ], [ "schlüsse", -12.25022029876709 ], [ "▁helicopter", -12.25039005279541 ], [ "Voici", -12.250479698181152 ], [ "▁Honey", -12.25049877166748 ], [ "▁deleted", -12.250511169433594 ], [ "▁Projekte", -12.250523567199707 ], [ "▁gasi", -12.2506742477417 ], [ "applique", -12.25068473815918 ], [ "TAL", -12.250699043273926 ], [ "notch", -12.250699996948242 ], [ "▁Response", -12.250818252563477 ], [ "▁deveni", -12.250818252563477 ], [ "▁regulate", -12.250829696655273 ], [ "▁vegetarian", -12.25083065032959 ], [ "▁Pastor", -12.250880241394043 ], [ "▁Strong", -12.250940322875977 ], [ "▁élèves", -12.251055717468262 ], [ "▁alimente", -12.25113582611084 ], [ "graphy", -12.251181602478027 ], [ "▁spirits", -12.251266479492188 ], [ "▁Cau", -12.251282691955566 ], [ "determin", -12.251304626464844 ], [ "arilor", -12.251382827758789 ], [ "▁masura", -12.251470565795898 ], [ "RAN", -12.251500129699707 ], [ "marked", -12.251564979553223 ], [ "cuba", -12.251602172851562 ], [ "omni", -12.251609802246094 ], [ "▁detox", -12.251662254333496 ], [ "▁quartz", -12.251741409301758 ], [ "▁Bug", -12.25177001953125 ], [ "▁Sugar", -12.25185775756836 ], [ "▁opponents", -12.25197982788086 ], [ "▁solved", -12.25207805633545 ], [ "semn", -12.252257347106934 ], [ "▁Prepare", -12.252558708190918 ], [ "ffel", -12.252586364746094 ], [ "▁Highlight", -12.252608299255371 ], [ "▁curent", -12.252618789672852 ], [ "▁praktisch", -12.252626419067383 ], [ "▁lending", -12.252676963806152 ], [ "▁minority", -12.252752304077148 ], [ "Free", -12.252970695495605 ], [ "business", 
-12.252997398376465 ], [ "▁outlook", -12.253097534179688 ], [ "▁assessments", -12.253168106079102 ], [ "▁Brother", -12.253266334533691 ], [ "▁partager", -12.25326919555664 ], [ "▁Brun", -12.25329303741455 ], [ "▁pedestrian", -12.25339412689209 ], [ "anța", -12.253413200378418 ], [ "▁recycled", -12.253457069396973 ], [ "▁quicker", -12.253626823425293 ], [ "▁lamps", -12.253683090209961 ], [ "▁nationally", -12.253813743591309 ], [ "▁Supplier", -12.253823280334473 ], [ "ograph", -12.253936767578125 ], [ "engage", -12.253981590270996 ], [ "▁Marg", -12.254131317138672 ], [ "▁aplicare", -12.254181861877441 ], [ "▁scared", -12.254194259643555 ], [ "▁accredited", -12.254255294799805 ], [ "▁outils", -12.25436019897461 ], [ "▁bâtiment", -12.254446029663086 ], [ "▁existed", -12.254586219787598 ], [ "gegangen", -12.254619598388672 ], [ "▁elevation", -12.25463581085205 ], [ "▁Tradition", -12.254670143127441 ], [ "▁Gericht", -12.254677772521973 ], [ "hub", -12.254680633544922 ], [ "strahl", -12.25473690032959 ], [ "build", -12.254796981811523 ], [ "▁Customers", -12.25487232208252 ], [ "klasse", -12.254890441894531 ], [ "▁pierre", -12.254895210266113 ], [ "(2)", -12.255006790161133 ], [ "Life", -12.255125999450684 ], [ "▁bachelor", -12.25513744354248 ], [ "▁quad", -12.255195617675781 ], [ "▁dispozitiv", -12.25523567199707 ], [ "106", -12.255266189575195 ], [ "▁suburb", -12.255495071411133 ], [ "▁1977", -12.255586624145508 ], [ "▁Alzheimer", -12.255973815917969 ], [ "▁spicy", -12.255988121032715 ], [ "▁spreading", -12.256002426147461 ], [ "nötigen", -12.256078720092773 ], [ "▁novels", -12.256104469299316 ], [ "▁responsabilité", -12.256141662597656 ], [ "▁Bud", -12.256332397460938 ], [ "▁desirable", -12.256407737731934 ], [ "TOR", -12.256444931030273 ], [ "five", -12.256547927856445 ], [ "▁Firmen", -12.256860733032227 ], [ "oeuvre", -12.257075309753418 ], [ "grass", -12.257233619689941 ], [ "▁practically", -12.257277488708496 ], [ "▁runners", -12.257281303405762 ], [ "▁mothers", -12.257341384887695 ], [ "Shop", -12.257345199584961 ], [ "▁Chicken", -12.257408142089844 ], [ "▁License", -12.257593154907227 ], [ "▁Bach", -12.25765323638916 ], [ "earliest", -12.257729530334473 ], [ "▁replica", -12.25774097442627 ], [ "▁haunt", -12.257833480834961 ], [ "▁materi", -12.257854461669922 ], [ "▁Finland", -12.257893562316895 ], [ "▁europene", -12.257919311523438 ], [ "abilă", -12.257944107055664 ], [ "cati", -12.258007049560547 ], [ "▁cholesterol", -12.258132934570312 ], [ "...).", -12.258151054382324 ], [ "cardi", -12.25838565826416 ], [ "▁(12", -12.258387565612793 ], [ "analyzed", -12.258506774902344 ], [ "▁respondents", -12.258591651916504 ], [ "▁höchste", -12.258646011352539 ], [ "▁Kern", -12.258647918701172 ], [ "▁knapp", -12.258781433105469 ], [ "▁Someone", -12.258955001831055 ], [ "▁équipé", -12.258997917175293 ], [ "credited", -12.259106636047363 ], [ "▁numar", -12.259163856506348 ], [ "▁Ace", -12.259185791015625 ], [ "zentrum", -12.2592191696167 ], [ "nehmer", -12.259270668029785 ], [ "arrivée", -12.259282112121582 ], [ "ELE", -12.259291648864746 ], [ "clean", -12.259418487548828 ], [ "Boost", -12.259538650512695 ], [ "call", -12.259575843811035 ], [ "▁Polizei", -12.259659767150879 ], [ "▁Januar", -12.259663581848145 ], [ "▁Tile", -12.259681701660156 ], [ "▁traduc", -12.259744644165039 ], [ "▁promptly", -12.259773254394531 ], [ "limit", -12.259809494018555 ], [ "▁recharge", -12.2598237991333 ], [ "▁wipe", -12.259862899780273 ], [ "▁Norway", -12.26001262664795 ], [ "▁Municipal", -12.260077476501465 ], [ 
"▁medieval", -12.260117530822754 ], [ "▁Treat", -12.26021671295166 ], [ "Orient", -12.260283470153809 ], [ "▁Stewart", -12.260294914245605 ], [ "▁lol", -12.26039981842041 ], [ "appartement", -12.260522842407227 ], [ "▁payer", -12.260655403137207 ], [ "▁splash", -12.260723114013672 ], [ "doubtedly", -12.260726928710938 ], [ "dry", -12.260846138000488 ], [ "▁Forex", -12.260939598083496 ], [ "▁Edinburgh", -12.260943412780762 ], [ "▁Traditional", -12.261032104492188 ], [ "▁1968", -12.261134147644043 ], [ "▁glow", -12.261248588562012 ], [ "Alternatively", -12.261265754699707 ], [ "▁partly", -12.261354446411133 ], [ "égi", -12.261401176452637 ], [ "▁Prices", -12.261640548706055 ], [ "haupt", -12.261651992797852 ], [ "▁sentences", -12.261711120605469 ], [ "ouvre", -12.261735916137695 ], [ "▁Liter", -12.261746406555176 ], [ "▁Important", -12.2620267868042 ], [ "▁Collins", -12.262077331542969 ], [ "▁reproduce", -12.262106895446777 ], [ "▁selten", -12.262124061584473 ], [ "▁Mitte", -12.262170791625977 ], [ "OA", -12.262174606323242 ], [ "▁Sister", -12.262358665466309 ], [ "▁responding", -12.262385368347168 ], [ "▁ballot", -12.262455940246582 ], [ "▁Nutrition", -12.262460708618164 ], [ "occurrence", -12.26246452331543 ], [ "Atunci", -12.262604713439941 ], [ "▁hockey", -12.262680053710938 ], [ "▁undertaking", -12.262697219848633 ], [ "▁educators", -12.262885093688965 ], [ "▁Swedish", -12.262893676757812 ], [ "▁Recovery", -12.262894630432129 ], [ "▁circum", -12.262910842895508 ], [ "▁chains", -12.263084411621094 ], [ "▁genug", -12.263113021850586 ], [ "▁Pil", -12.263227462768555 ], [ "▁farms", -12.263265609741211 ], [ "▁simplicity", -12.263336181640625 ], [ "-21", -12.263399124145508 ], [ "▁partition", -12.263493537902832 ], [ "▁Relations", -12.26360034942627 ], [ "zentrale", -12.263794898986816 ], [ "lapse", -12.263855934143066 ], [ "▁toast", -12.263862609863281 ], [ "▁citi", -12.263946533203125 ], [ "▁longtemps", -12.263984680175781 ], [ "maj", -12.264448165893555 ], [ "▁Cin", -12.264483451843262 ], [ "zeichen", -12.264504432678223 ], [ "▁Zoo", -12.264567375183105 ], [ "▁frisch", -12.264570236206055 ], [ "▁permettra", -12.264595031738281 ], [ "▁Liberty", -12.264642715454102 ], [ "▁playground", -12.264873504638672 ], [ "▁Mate", -12.265031814575195 ], [ "▁evolving", -12.265066146850586 ], [ "national", -12.265207290649414 ], [ "▁signifie", -12.265279769897461 ], [ "▁Related", -12.265292167663574 ], [ "NES", -12.265337944030762 ], [ "euil", -12.265473365783691 ], [ "▁struggles", -12.265542030334473 ], [ "▁instinct", -12.265628814697266 ], [ "arbre", -12.26608943939209 ], [ "▁commands", -12.266222953796387 ], [ "▁frumoase", -12.26637077331543 ], [ "▁watches", -12.266779899597168 ], [ "NM", -12.266804695129395 ], [ "▁influential", -12.266807556152344 ], [ "▁gewesen", -12.266901969909668 ], [ "▁Pictures", -12.267224311828613 ], [ "▁HVAC", -12.267242431640625 ], [ "▁skate", -12.26732063293457 ], [ "▁Robot", -12.267327308654785 ], [ "▁Boys", -12.267404556274414 ], [ "▁Mutter", -12.267425537109375 ], [ "▁marques", -12.267539024353027 ], [ "utiliser", -12.267793655395508 ], [ "▁amazed", -12.267799377441406 ], [ "ächtig", -12.26783275604248 ], [ "▁Success", -12.267870903015137 ], [ "gramm", -12.267956733703613 ], [ "▁1972", -12.267956733703613 ], [ "▁marina", -12.268269538879395 ], [ "▁lou", -12.268321990966797 ], [ "▁précis", -12.268380165100098 ], [ "ographic", -12.268482208251953 ], [ "people", -12.26848316192627 ], [ "fahr", -12.268547058105469 ], [ "▁Contemporary", -12.268550872802734 ], [ "▁frustrating", 
-12.26858139038086 ], [ "chide", -12.268704414367676 ], [ "1.5", -12.268807411193848 ], [ "▁ankle", -12.268850326538086 ], [ "▁proximity", -12.268986701965332 ], [ "▁Leute", -12.269006729125977 ], [ "UA", -12.269031524658203 ], [ "union", -12.269131660461426 ], [ "▁recovered", -12.269133567810059 ], [ "▁sword", -12.269216537475586 ], [ "▁Mut", -12.26923942565918 ], [ "▁Rin", -12.269360542297363 ], [ "▁lectures", -12.26942253112793 ], [ "▁licensing", -12.269423484802246 ], [ "MAC", -12.269498825073242 ], [ "▁commute", -12.269776344299316 ], [ "Acesta", -12.269858360290527 ], [ "▁Koch", -12.270088195800781 ], [ "▁depozit", -12.270119667053223 ], [ "▁erstmal", -12.270163536071777 ], [ "arhi", -12.270271301269531 ], [ "▁Normal", -12.270462036132812 ], [ "EZ", -12.270464897155762 ], [ "ărilor", -12.270986557006836 ], [ "▁favoris", -12.271041870117188 ], [ "▁$9", -12.271050453186035 ], [ "▁Lawrence", -12.271172523498535 ], [ "▁fixing", -12.271200180053711 ], [ "▁researching", -12.271288871765137 ], [ "▁Pant", -12.271467208862305 ], [ "▁candid", -12.271490097045898 ], [ "▁Arkansas", -12.27160930633545 ], [ "▁bitcoin", -12.271612167358398 ], [ "ва", -12.271645545959473 ], [ "▁Finger", -12.271692276000977 ], [ "▁SRL", -12.271718978881836 ], [ "Arg", -12.271797180175781 ], [ "trade", -12.271903991699219 ], [ "▁extraction", -12.271941184997559 ], [ "▁footprint", -12.2720308303833 ], [ "▁folosite", -12.272085189819336 ], [ "▁Flex", -12.272184371948242 ], [ "▁dys", -12.272294998168945 ], [ "▁Wright", -12.272343635559082 ], [ "▁multitude", -12.272378921508789 ], [ "▁Chu", -12.272494316101074 ], [ "▁Jerry", -12.27249526977539 ], [ "▁notebook", -12.272722244262695 ], [ "▁SIM", -12.272932052612305 ], [ "dietary", -12.272963523864746 ], [ "▁polished", -12.272984504699707 ], [ "▁carriers", -12.272993087768555 ], [ "▁cardiac", -12.27299976348877 ], [ "▁burned", -12.273038864135742 ], [ "▁sealed", -12.273062705993652 ], [ "▁pumps", -12.273224830627441 ], [ "▁consumed", -12.273233413696289 ], [ "▁Teaching", -12.273446083068848 ], [ "▁daughters", -12.27348518371582 ], [ "serviciile", -12.273600578308105 ], [ "▁Teams", -12.273690223693848 ], [ "▁avoided", -12.273903846740723 ], [ "▁compagnie", -12.274019241333008 ], [ "▁mașin", -12.274024963378906 ], [ "▁Sean", -12.27418041229248 ], [ "▁arunc", -12.274208068847656 ], [ "kräfte", -12.274238586425781 ], [ "vani", -12.274255752563477 ], [ "Metall", -12.27437973022461 ], [ "2009", -12.274449348449707 ], [ "moi", -12.274688720703125 ], [ "▁THAT", -12.274700164794922 ], [ "▁Ny", -12.274809837341309 ], [ "▁countertops", -12.274860382080078 ], [ "Pod", -12.274938583374023 ], [ "amente", -12.274943351745605 ], [ "▁offshore", -12.275001525878906 ], [ "luti", -12.275087356567383 ], [ "parked", -12.275160789489746 ], [ "ajout", -12.275247573852539 ], [ "Shirt", -12.275328636169434 ], [ "▁3/4", -12.275389671325684 ], [ "▁gratuite", -12.27543830871582 ], [ "mètres", -12.27557373046875 ], [ "▁Wish", -12.2755765914917 ], [ "▁holistic", -12.27558422088623 ], [ "gren", -12.275607109069824 ], [ "compiled", -12.275660514831543 ], [ "▁innocent", -12.275779724121094 ], [ "▁sorte", -12.275787353515625 ], [ "▁insulin", -12.275792121887207 ], [ "▁Academic", -12.275996208190918 ], [ "▁acrylic", -12.27600383758545 ], [ "▁hinzu", -12.27616024017334 ], [ "▁compression", -12.27619457244873 ], [ "▁viral", -12.276220321655273 ], [ "▁stereo", -12.2764892578125 ], [ "▁Concept", -12.276542663574219 ], [ "▁Margaret", -12.276659965515137 ], [ "▁consolidation", -12.276875495910645 ], [ "Figure", 
-12.277058601379395 ], [ "zzo", -12.277061462402344 ], [ "▁Egg", -12.277098655700684 ], [ "weiterhin", -12.277213096618652 ], [ "▁Vista", -12.277252197265625 ], [ "▁necessity", -12.277316093444824 ], [ "▁kayak", -12.277490615844727 ], [ "▁consensus", -12.277535438537598 ], [ "▁Katz", -12.277602195739746 ], [ "▁Warren", -12.277640342712402 ], [ "▁custody", -12.277755737304688 ], [ "++", -12.277759552001953 ], [ "▁paiement", -12.277782440185547 ], [ "▁foul", -12.277878761291504 ], [ "Chaque", -12.277934074401855 ], [ "▁Syrian", -12.277998924255371 ], [ "▁photographers", -12.278056144714355 ], [ "▁dismiss", -12.278270721435547 ], [ "▁Gaz", -12.278526306152344 ], [ "▁développer", -12.278529167175293 ], [ "▁Dakota", -12.27863883972168 ], [ "▁cardiovascular", -12.278642654418945 ], [ "▁tattoo", -12.278858184814453 ], [ "▁Lighting", -12.278918266296387 ], [ "▁nowhere", -12.278940200805664 ], [ "vada", -12.27895450592041 ], [ "▁Favor", -12.279084205627441 ], [ "ruled", -12.2791748046875 ], [ "▁Dating", -12.2793550491333 ], [ "gain", -12.279963493347168 ], [ "rism", -12.28016471862793 ], [ "coloured", -12.280169486999512 ], [ "▁refugees", -12.280184745788574 ], [ "▁Schm", -12.2803955078125 ], [ "▁happily", -12.280402183532715 ], [ "▁specification", -12.280607223510742 ], [ "WM", -12.280736923217773 ], [ "▁intro", -12.280823707580566 ], [ "rack", -12.28097915649414 ], [ "characterized", -12.28107738494873 ], [ "▁externe", -12.281136512756348 ], [ "▁arrives", -12.28114128112793 ], [ "WO", -12.281181335449219 ], [ "bericht", -12.281233787536621 ], [ "▁delays", -12.281242370605469 ], [ "▁Flight", -12.281256675720215 ], [ "1-3", -12.281524658203125 ], [ "▁Singh", -12.281548500061035 ], [ "▁shifting", -12.281651496887207 ], [ "▁dashboard", -12.281729698181152 ], [ "▁lieux", -12.281781196594238 ], [ "▁validate", -12.281901359558105 ], [ "▁uniquement", -12.281963348388672 ], [ "clip", -12.28199291229248 ], [ "cov", -12.282132148742676 ], [ "▁tendance", -12.282215118408203 ], [ "èle", -12.282258033752441 ], [ "▁incepe", -12.282261848449707 ], [ "▁chunk", -12.282585144042969 ], [ "▁Nr", -12.28266716003418 ], [ "▁Montana", -12.282674789428711 ], [ "▁sticks", -12.28277587890625 ], [ "▁caps", -12.28309154510498 ], [ "▁Jimmy", -12.283167839050293 ], [ "▁Levi", -12.283285140991211 ], [ "▁cables", -12.28345012664795 ], [ "▁SB", -12.283550262451172 ], [ "▁thème", -12.2836275100708 ], [ "ADA", -12.283672332763672 ], [ "▁garant", -12.283686637878418 ], [ "▁Joint", -12.283820152282715 ], [ "▁partage", -12.28398323059082 ], [ "schreib", -12.284119606018066 ], [ "ether", -12.28420352935791 ], [ "▁Klima", -12.284303665161133 ], [ "▁medicines", -12.284317016601562 ], [ "▁pH", -12.284320831298828 ], [ "Architect", -12.284378051757812 ], [ "știi", -12.284396171569824 ], [ "▁retrouve", -12.284700393676758 ], [ "▁posture", -12.284753799438477 ], [ "Feature", -12.284773826599121 ], [ "▁drying", -12.284884452819824 ], [ "trifft", -12.28488826751709 ], [ "ibi", -12.285079002380371 ], [ "▁rezerv", -12.285116195678711 ], [ "▁Vă", -12.28518009185791 ], [ "▁Speaker", -12.285282135009766 ], [ "▁illustration", -12.285319328308105 ], [ "oooo", -12.285419464111328 ], [ "▁initiated", -12.285518646240234 ], [ "PK", -12.285545349121094 ], [ "▁algorithms", -12.285630226135254 ], [ "▁zice", -12.285757064819336 ], [ "WI", -12.28581428527832 ], [ "urgence", -12.285823822021484 ], [ "▁bloggers", -12.285887718200684 ], [ "▁realitate", -12.285894393920898 ], [ "eks", -12.28598690032959 ], [ "▁cushions", -12.286149024963379 ], [ "▁Kri", 
-12.286224365234375 ], [ "▁réalisation", -12.286396026611328 ], [ "▁Photoshop", -12.286407470703125 ], [ "cret", -12.286462783813477 ], [ "faire", -12.286613464355469 ], [ "▁Cei", -12.286782264709473 ], [ "ICO", -12.286789894104004 ], [ "Contin", -12.28681755065918 ], [ "▁Builder", -12.286916732788086 ], [ "look", -12.28698444366455 ], [ "▁tenants", -12.287023544311523 ], [ "▁gloves", -12.287113189697266 ], [ "Day", -12.287169456481934 ], [ "firmly", -12.28725814819336 ], [ "CIA", -12.287352561950684 ], [ "▁TVA", -12.28741455078125 ], [ "▁notifications", -12.287446975708008 ], [ "▁Higher", -12.287459373474121 ], [ "▁Weihnachts", -12.287491798400879 ], [ "▁blur", -12.287755012512207 ], [ "ов", -12.288087844848633 ], [ "feder", -12.288159370422363 ], [ "▁explosion", -12.288171768188477 ], [ "▁Fenster", -12.288189888000488 ], [ "▁junge", -12.288225173950195 ], [ "▁Highland", -12.288230895996094 ], [ "▁Lü", -12.288290023803711 ], [ "▁Alba", -12.28832721710205 ], [ "▁Dort", -12.288338661193848 ], [ "▁recruiting", -12.28835391998291 ], [ "▁Multiple", -12.288549423217773 ], [ "▁animated", -12.288604736328125 ], [ "▁Virgin", -12.288637161254883 ], [ "1000", -12.288676261901855 ], [ "▁resin", -12.288700103759766 ], [ "▁matrix", -12.288826942443848 ], [ "irri", -12.289011001586914 ], [ "▁chiffre", -12.28904914855957 ], [ "▁Corps", -12.289252281188965 ], [ "▁advocacy", -12.28927230834961 ], [ "▁pozitiv", -12.289274215698242 ], [ "▁pouss", -12.289451599121094 ], [ "événement", -12.28950309753418 ], [ "▁pielii", -12.289717674255371 ], [ "onnais", -12.289750099182129 ], [ "▁Statement", -12.289754867553711 ], [ "crimin", -12.289868354797363 ], [ "hidrat", -12.289942741394043 ], [ "▁Jugendliche", -12.290057182312012 ], [ "TRI", -12.290223121643066 ], [ "erra", -12.290240287780762 ], [ "chat", -12.290321350097656 ], [ "▁traits", -12.290359497070312 ], [ "▁incentives", -12.29038143157959 ], [ "▁accelerate", -12.290568351745605 ], [ "woven", -12.290633201599121 ], [ "UST", -12.290688514709473 ], [ "▁premiers", -12.290717124938965 ], [ "▁Ferien", -12.290755271911621 ], [ "▁mariage", -12.290796279907227 ], [ "▁financially", -12.290801048278809 ], [ "gesellschaft", -12.290863037109375 ], [ "▁situaţi", -12.290865898132324 ], [ "▁quoted", -12.291373252868652 ], [ "▁periodic", -12.291421890258789 ], [ "▁chaos", -12.291543960571289 ], [ "▁remodel", -12.29159927368164 ], [ "▁Contractor", -12.291641235351562 ], [ "▁recuper", -12.291729927062988 ], [ "▁driveway", -12.291755676269531 ], [ "▁entertain", -12.291765213012695 ], [ "▁condus", -12.291769027709961 ], [ "▁chefs", -12.29184341430664 ], [ "pak", -12.291866302490234 ], [ "▁possède", -12.291948318481445 ], [ "▁outreach", -12.291984558105469 ], [ "▁navig", -12.292036056518555 ], [ "▁renewal", -12.292071342468262 ], [ "▁Rice", -12.292309761047363 ], [ "▁Czech", -12.292398452758789 ], [ "▁entstehen", -12.292445182800293 ], [ "▁droite", -12.292448997497559 ], [ "▁Investor", -12.292497634887695 ], [ "▁Soci", -12.29250431060791 ], [ "▁scalp", -12.292622566223145 ], [ "▁politiques", -12.292815208435059 ], [ "▁plaintiff", -12.292841911315918 ], [ "extending", -12.29287052154541 ], [ "▁paperwork", -12.29300594329834 ], [ "vizi", -12.293142318725586 ], [ "assisting", -12.29317569732666 ], [ "local", -12.293272972106934 ], [ "▁Wear", -12.293323516845703 ], [ "▁descend", -12.293340682983398 ], [ "▁Wikipedia", -12.293513298034668 ], [ "▁Consiliului", -12.293516159057617 ], [ "▁Nokia", -12.293540000915527 ], [ "▁facult", -12.293560028076172 ], [ "▁altogether", 
-12.293851852416992 ], [ "▁rankings", -12.29391860961914 ], [ "▁downloading", -12.293953895568848 ], [ "QU", -12.294007301330566 ], [ "▁Olive", -12.294041633605957 ], [ "▁backdrop", -12.294110298156738 ], [ "▁recomandat", -12.294116020202637 ], [ "▁Faculty", -12.294184684753418 ], [ "ANS", -12.294220924377441 ], [ "▁fracture", -12.294225692749023 ], [ "job", -12.29448127746582 ], [ "▁anticipate", -12.294525146484375 ], [ "▁drift", -12.294543266296387 ], [ "▁Marco", -12.294632911682129 ], [ "▁witnessed", -12.294700622558594 ], [ "▁comprend", -12.294974327087402 ], [ "▁bulb", -12.29504680633545 ], [ "▁shallow", -12.295059204101562 ], [ "stärke", -12.295063972473145 ], [ "▁Jessica", -12.295080184936523 ], [ "▁démarche", -12.29508113861084 ], [ "▁traditionally", -12.29508113861084 ], [ "Deputy", -12.295093536376953 ], [ "▁rivers", -12.295260429382324 ], [ "▁livraison", -12.29531192779541 ], [ "▁lacking", -12.295421600341797 ], [ "▁remodeling", -12.295426368713379 ], [ "▁acesteia", -12.295514106750488 ], [ "▁grosse", -12.295669555664062 ], [ "▁propus", -12.295833587646484 ], [ "lessly", -12.29587459564209 ], [ "▁Kredit", -12.295931816101074 ], [ "reputable", -12.295981407165527 ], [ "▁Sell", -12.2960205078125 ], [ "▁Crime", -12.296111106872559 ], [ "Ent", -12.296310424804688 ], [ "finity", -12.296422004699707 ], [ "▁Complex", -12.296500205993652 ], [ "easing", -12.296638488769531 ], [ "dynamic", -12.296670913696289 ], [ "▁eaten", -12.296727180480957 ], [ "gezogen", -12.296734809875488 ], [ "▁2004,", -12.296774864196777 ], [ "▁Muslims", -12.296822547912598 ], [ "▁Sprache", -12.296883583068848 ], [ "▁Truth", -12.296927452087402 ], [ "▁guarantees", -12.296928405761719 ], [ "/5", -12.29712963104248 ], [ "”).", -12.297135353088379 ], [ "▁Medium", -12.2972993850708 ], [ "▁décidé", -12.297445297241211 ], [ "▁balcony", -12.29747200012207 ], [ "leuchte", -12.297502517700195 ], [ "hik", -12.297849655151367 ], [ "▁Agriculture", -12.298221588134766 ], [ "▁securities", -12.298221588134766 ], [ "Probably", -12.298224449157715 ], [ "▁macar", -12.29824161529541 ], [ "▁Signal", -12.298399925231934 ], [ "lake", -12.298677444458008 ], [ "▁compétences", -12.298726081848145 ], [ "▁proprietary", -12.298812866210938 ], [ "allons", -12.298850059509277 ], [ "▁belongs", -12.298916816711426 ], [ "▁missile", -12.298958778381348 ], [ "țiune", -12.298999786376953 ], [ "▁Integration", -12.299116134643555 ], [ "▁testimony", -12.299120903015137 ], [ "▁wesentlich", -12.299142837524414 ], [ "▁donors", -12.299152374267578 ], [ "▁pivot", -12.299202919006348 ], [ "▁Uber", -12.299219131469727 ], [ "▁databases", -12.299281120300293 ], [ "▁studi", -12.299317359924316 ], [ "totdeauna", -12.299351692199707 ], [ "▁briefly", -12.299449920654297 ], [ "▁livr", -12.29952335357666 ], [ "▁CRM", -12.299581527709961 ], [ "gone", -12.299697875976562 ], [ "10)", -12.299761772155762 ], [ "▁zilele", -12.299920082092285 ], [ "Basically", -12.300008773803711 ], [ "▁medie", -12.300041198730469 ], [ "spotted", -12.30006217956543 ], [ "▁troubles", -12.30009937286377 ], [ "▁acknowledged", -12.300176620483398 ], [ "350", -12.300185203552246 ], [ "LB", -12.300273895263672 ], [ "Phy", -12.30038833618164 ], [ "natal", -12.300397872924805 ], [ "illé", -12.300445556640625 ], [ "bilder", -12.300625801086426 ], [ "▁apples", -12.300636291503906 ], [ "graphical", -12.300889015197754 ], [ "organiser", -12.301024436950684 ], [ "▁ochii", -12.301040649414062 ], [ "glas", -12.301178932189941 ], [ "CAP", -12.301180839538574 ], [ "▁Doors", -12.301331520080566 ], [ "▁Eis", 
-12.30156135559082 ], [ "tipuri", -12.301590919494629 ], [ "▁Worth", -12.301684379577637 ], [ "izează", -12.301719665527344 ], [ "nunț", -12.30180549621582 ], [ "▁Trip", -12.30186653137207 ], [ "ISS", -12.301976203918457 ], [ "efficient", -12.30201530456543 ], [ "Luckily", -12.302099227905273 ], [ "▁vase", -12.302133560180664 ], [ "▁gay", -12.302343368530273 ], [ "▁certificates", -12.302434921264648 ], [ "riad", -12.302549362182617 ], [ "stab", -12.302570343017578 ], [ "affiche", -12.302604675292969 ], [ "▁iPod", -12.302645683288574 ], [ "▁aștept", -12.302726745605469 ], [ "▁$500", -12.302751541137695 ], [ "▁Catherine", -12.302952766418457 ], [ "▁Circuit", -12.302957534790039 ], [ "▁ranch", -12.303045272827148 ], [ "▁consequence", -12.303118705749512 ], [ "listened", -12.303131103515625 ], [ "▁Options", -12.303187370300293 ], [ "feed", -12.30318832397461 ], [ "▁adviser", -12.303248405456543 ], [ "▁présenter", -12.30333423614502 ], [ "substant", -12.30337905883789 ], [ "▁Flag", -12.303604125976562 ], [ "▁Keith", -12.30366325378418 ], [ "▁inima", -12.303709983825684 ], [ "▁substrate", -12.30373764038086 ], [ "▁charger", -12.303803443908691 ], [ "▁reporter", -12.303844451904297 ], [ "ütz", -12.304068565368652 ], [ "▁unten", -12.30417537689209 ], [ "▁sympa", -12.304542541503906 ], [ "▁defeated", -12.304600715637207 ], [ "ändig", -12.304644584655762 ], [ "individu", -12.304747581481934 ], [ "▁Straßen", -12.304774284362793 ], [ "▁Nepal", -12.304791450500488 ], [ "million", -12.304803848266602 ], [ "▁Cake", -12.30499267578125 ], [ "▁investigations", -12.30526065826416 ], [ "▁inspector", -12.3054780960083 ], [ "▁Campbell", -12.305486679077148 ], [ "▁consommation", -12.305489540100098 ], [ "▁Ministerul", -12.305628776550293 ], [ "Advisory", -12.305749893188477 ], [ "▁Leistungs", -12.305939674377441 ], [ "▁Pull", -12.306157112121582 ], [ "▁lover", -12.306194305419922 ], [ "▁trunk", -12.306380271911621 ], [ "▁folosesc", -12.30639934539795 ], [ "pom", -12.306558609008789 ], [ "wunder", -12.306794166564941 ], [ "▁happier", -12.306801795959473 ], [ "▁embark", -12.30689525604248 ], [ "▁mediul", -12.3069486618042 ], [ "riff", -12.306973457336426 ], [ "▁copilul", -12.307039260864258 ], [ "ommage", -12.307126998901367 ], [ "rechnung", -12.307218551635742 ], [ "NU", -12.307220458984375 ], [ "▁fellowship", -12.307395935058594 ], [ "▁Mental", -12.307403564453125 ], [ "▁fever", -12.3074312210083 ], [ "▁silly", -12.307547569274902 ], [ "Object", -12.30756664276123 ], [ "NV", -12.307591438293457 ], [ "от", -12.30774974822998 ], [ "▁Strand", -12.307762145996094 ], [ "▁Exist", -12.30777359008789 ], [ "warum", -12.307832717895508 ], [ "CY", -12.307848930358887 ], [ "kä", -12.307856559753418 ], [ "!!!!!", -12.307869911193848 ], [ "▁moarte", -12.30793571472168 ], [ "▁waterfall", -12.308024406433105 ], [ "left", -12.30815601348877 ], [ "▁Nursing", -12.308225631713867 ], [ "▁invalid", -12.30826187133789 ], [ "struktur", -12.308385848999023 ], [ "Allerdings", -12.30838680267334 ], [ "étranger", -12.30838680267334 ], [ "▁prost", -12.308517456054688 ], [ "▁Parent", -12.308562278747559 ], [ "▁întreag", -12.308611869812012 ], [ "▁compensate", -12.308871269226074 ], [ "▁sometime", -12.308955192565918 ], [ "graduate", -12.308968544006348 ], [ "▁Carter", -12.30898380279541 ], [ "▁crap", -12.308998107910156 ], [ "▁mathematics", -12.309067726135254 ], [ "resemble", -12.309069633483887 ], [ "Dame", -12.309152603149414 ], [ "▁Swa", -12.309198379516602 ], [ "▁celebrity", -12.309239387512207 ], [ "▁verified", -12.309338569641113 ], [ 
"▁Behind", -12.309349060058594 ], [ "carbon", -12.309432983398438 ], [ "▁gateway", -12.309490203857422 ], [ "▁ambitious", -12.30952262878418 ], [ "▁Wellness", -12.30966567993164 ], [ "30,000", -12.30968189239502 ], [ "defined", -12.309929847717285 ], [ "specializes", -12.310121536254883 ], [ "▁Chase", -12.310199737548828 ], [ "HF", -12.310233116149902 ], [ "ABLE", -12.310348510742188 ], [ "▁Ehr", -12.310467720031738 ], [ "▁régime", -12.310480117797852 ], [ "▁awake", -12.310487747192383 ], [ "▁seafood", -12.310487747192383 ], [ "leading", -12.310554504394531 ], [ "▁Rule", -12.310602188110352 ], [ "verkehr", -12.310726165771484 ], [ "erem", -12.310737609863281 ], [ "▁1973", -12.310795783996582 ], [ "personal", -12.311171531677246 ], [ "ența", -12.311330795288086 ], [ "apprend", -12.311396598815918 ], [ "faisant", -12.311420440673828 ], [ "▁Sounds", -12.31151008605957 ], [ "▁Launch", -12.31151294708252 ], [ "half", -12.311636924743652 ], [ "▁verre", -12.311859130859375 ], [ "▁Regular", -12.31207275390625 ], [ "▁Nancy", -12.312142372131348 ], [ "quelles", -12.312161445617676 ], [ "▁erhält", -12.312169075012207 ], [ "▁socks", -12.3121919631958 ], [ "lamp", -12.312387466430664 ], [ "▁durchgeführt", -12.312472343444824 ], [ "▁advertise", -12.31260871887207 ], [ "powered", -12.312653541564941 ], [ "▁concur", -12.312699317932129 ], [ "▁ressources", -12.31293773651123 ], [ "▁allocation", -12.312986373901367 ], [ "chon", -12.313041687011719 ], [ "▁Larry", -12.313177108764648 ], [ "lässig", -12.313254356384277 ], [ "OLD", -12.313493728637695 ], [ "itty", -12.313599586486816 ], [ "▁immuno", -12.313645362854004 ], [ "▁(+", -12.313651084899902 ], [ "▁Essential", -12.313674926757812 ], [ "▁semaines", -12.313719749450684 ], [ "Ru", -12.31375503540039 ], [ "▁Gear", -12.313764572143555 ], [ "völlig", -12.313850402832031 ], [ "liga", -12.31391716003418 ], [ "▁Neg", -12.314082145690918 ], [ "▁gratitude", -12.31408977508545 ], [ "aventure", -12.314108848571777 ], [ "▁frustrated", -12.314115524291992 ], [ "▁retrait", -12.31422233581543 ], [ "▁statut", -12.314231872558594 ], [ "550", -12.31434440612793 ], [ "ла", -12.314428329467773 ], [ "risto", -12.314448356628418 ], [ "WAY", -12.314607620239258 ], [ "▁pigment", -12.314652442932129 ], [ "Selon", -12.314715385437012 ], [ "stil", -12.3148775100708 ], [ "▁Marin", -12.315055847167969 ], [ "ashi", -12.315085411071777 ], [ "▁contine", -12.31519889831543 ], [ "▁Economics", -12.315200805664062 ], [ "both", -12.3152437210083 ], [ "▁Dou", -12.31527328491211 ], [ "Fel", -12.315373420715332 ], [ "UNT", -12.315434455871582 ], [ "▁grandmother", -12.31548023223877 ], [ "▁domicile", -12.315678596496582 ], [ "▁buffer", -12.31574535369873 ], [ "▁fuse", -12.315815925598145 ], [ "▁dosage", -12.315821647644043 ], [ "▁Nici", -12.315839767456055 ], [ "▁worries", -12.315908432006836 ], [ "▁Rail", -12.3159818649292 ], [ "uneori", -12.315990447998047 ], [ "▁Sierra", -12.316030502319336 ], [ "▁porni", -12.316032409667969 ], [ "▁NOTE", -12.316056251525879 ], [ "▁tendency", -12.316065788269043 ], [ "Set", -12.316256523132324 ], [ "▁Hof", -12.31629753112793 ], [ "▁Ruhe", -12.316300392150879 ], [ "harm", -12.316360473632812 ], [ "▁Developer", -12.316367149353027 ], [ "suing", -12.316400527954102 ], [ "persönlichen", -12.31658935546875 ], [ "▁agréable", -12.316596031188965 ], [ "commissioned", -12.316696166992188 ], [ "▁1974", -12.31672191619873 ], [ "▁1969", -12.316758155822754 ], [ "▁regl", -12.316996574401855 ], [ "▁terror", -12.317042350769043 ], [ "▁température", -12.317051887512207 ], [ 
"▁Archiv", -12.31706714630127 ], [ "▁Military", -12.317140579223633 ], [ "▁König", -12.317290306091309 ], [ "▁forex", -12.31737232208252 ], [ "wiki", -12.31745719909668 ], [ "thetic", -12.317506790161133 ], [ "alaturi", -12.317974090576172 ], [ "▁montant", -12.3179931640625 ], [ "▁maladie", -12.318044662475586 ], [ "gust", -12.318151473999023 ], [ "▁demander", -12.318164825439453 ], [ "avocat", -12.318191528320312 ], [ "▁sci", -12.318192481994629 ], [ "▁Wireless", -12.318214416503906 ], [ "▁Dein", -12.318220138549805 ], [ "▁trio", -12.3183012008667 ], [ "▁Same", -12.318395614624023 ], [ "Datei", -12.318464279174805 ], [ "▁alerg", -12.318578720092773 ], [ "crowded", -12.318657875061035 ], [ "▁Punkt", -12.318853378295898 ], [ "▁sanctions", -12.318864822387695 ], [ "stating", -12.318922996520996 ], [ "▁discusse", -12.318949699401855 ], [ "▁Eigen", -12.319068908691406 ], [ "▁sănătate", -12.31911563873291 ], [ "▁correspondence", -12.319211959838867 ], [ "cred", -12.319331169128418 ], [ "VG", -12.319347381591797 ], [ "▁différence", -12.319347381591797 ], [ "▁Montreal", -12.319391250610352 ], [ "▁masini", -12.319398880004883 ], [ "iata", -12.319487571716309 ], [ "▁sampling", -12.319574356079102 ], [ "▁Gib", -12.319831848144531 ], [ "▁sheer", -12.319944381713867 ], [ "330", -12.319947242736816 ], [ "CHI", -12.319990158081055 ], [ "▁damn", -12.320030212402344 ], [ "▁Advisor", -12.320201873779297 ], [ "Typically", -12.320302963256836 ], [ "ssé", -12.320352554321289 ], [ "quart", -12.320361137390137 ], [ "chete", -12.320385932922363 ], [ "▁Puerto", -12.32049560546875 ], [ "2-1", -12.32050609588623 ], [ "NN", -12.320674896240234 ], [ "▁styling", -12.320707321166992 ], [ "rud", -12.320777893066406 ], [ "од", -12.320856094360352 ], [ "▁Hydro", -12.320941925048828 ], [ "▁Cable", -12.320961952209473 ], [ "video", -12.320974349975586 ], [ "▁Wirkung", -12.321194648742676 ], [ "▁noble", -12.321270942687988 ], [ "▁Sonder", -12.32129192352295 ], [ "mati", -12.321317672729492 ], [ "850", -12.321395874023438 ], [ "▁Richmond", -12.32143497467041 ], [ "▁niciodată", -12.321442604064941 ], [ "AO", -12.321527481079102 ], [ "▁altered", -12.321648597717285 ], [ "▁(15", -12.32168960571289 ], [ "▁Motiv", -12.322052001953125 ], [ "AKE", -12.322089195251465 ], [ "▁bestimmte", -12.322172164916992 ], [ "6.5", -12.322176933288574 ], [ "hectare", -12.322333335876465 ], [ "atorită", -12.322335243225098 ], [ "▁phases", -12.322447776794434 ], [ "▁Nova", -12.322566032409668 ], [ "ordinateur", -12.322579383850098 ], [ "▁corrupt", -12.322813034057617 ], [ "error", -12.322895050048828 ], [ "▁attacked", -12.323005676269531 ], [ "▁Kirche", -12.323019981384277 ], [ "heir", -12.323040962219238 ], [ "Das", -12.323254585266113 ], [ "▁anxious", -12.323258399963379 ], [ "▁Doc", -12.323386192321777 ], [ "▁Roth", -12.323415756225586 ], [ "▁Cine", -12.32388687133789 ], [ "▁auditor", -12.324418067932129 ], [ "▁beverage", -12.324586868286133 ], [ "▁précédent", -12.324637413024902 ], [ "▁deploy", -12.324837684631348 ], [ "▁accessibility", -12.324843406677246 ], [ "▁cage", -12.324885368347168 ], [ "▁Contra", -12.324934005737305 ], [ "Best", -12.324952125549316 ], [ "iji", -12.324972152709961 ], [ "▁père", -12.325060844421387 ], [ "▁scenic", -12.32511043548584 ], [ "synthesis", -12.325165748596191 ], [ "ßen", -12.32534408569336 ], [ "▁Videos", -12.325482368469238 ], [ "▁refus", -12.325484275817871 ], [ "stimmen", -12.3255615234375 ], [ "▁sleek", -12.325577735900879 ], [ "artige", -12.32563591003418 ], [ "mari", -12.32568359375 ], [ "▁excelent", 
-12.325740814208984 ], [ "▁negativ", -12.325806617736816 ], [ "▁blocking", -12.32590103149414 ], [ "spricht", -12.326001167297363 ], [ "▁discomfort", -12.32602310180664 ], [ "▁stratégie", -12.32602310180664 ], [ "▁Datenschutz", -12.326078414916992 ], [ "curg", -12.326128005981445 ], [ "▁lapte", -12.326432228088379 ], [ "▁acasă", -12.326491355895996 ], [ "▁ausschließlich", -12.32653522491455 ], [ "▁unbedingt", -12.326802253723145 ], [ "▁Linie", -12.32689380645752 ], [ "▁subscribers", -12.327019691467285 ], [ "109", -12.32702350616455 ], [ "▁Waste", -12.32712173461914 ], [ "▁Planung", -12.327231407165527 ], [ "▁visually", -12.32734489440918 ], [ "utilizarea", -12.327370643615723 ], [ "uba", -12.327381134033203 ], [ "▁fifteen", -12.327411651611328 ], [ "▁légère", -12.327411651611328 ], [ "ința", -12.327446937561035 ], [ "▁tolerance", -12.327460289001465 ], [ "▁piscine", -12.327536582946777 ], [ "▁nails", -12.327569007873535 ], [ "▁accus", -12.327693939208984 ], [ "▁coeur", -12.327773094177246 ], [ "freie", -12.327849388122559 ], [ "enţă", -12.32812213897705 ], [ "▁glucose", -12.328336715698242 ], [ "▁Jar", -12.32838249206543 ], [ "▁commencer", -12.328387260437012 ], [ "▁eliminating", -12.328414916992188 ], [ "▁mutation", -12.32844352722168 ], [ "▁afirma", -12.328444480895996 ], [ "▁Consulting", -12.328454971313477 ], [ "adia", -12.328543663024902 ], [ "zog", -12.328604698181152 ], [ "▁pielea", -12.328658103942871 ], [ "rton", -12.328706741333008 ], [ "exercice", -12.3287935256958 ], [ "namely", -12.328847885131836 ], [ "▁ajutor", -12.3289155960083 ], [ "▁markers", -12.328917503356934 ], [ "▁gardening", -12.328932762145996 ], [ "Karte", -12.329038619995117 ], [ "▁Pump", -12.329142570495605 ], [ "▁Dual", -12.329169273376465 ], [ "▁pratiques", -12.329349517822266 ], [ "▁behavioral", -12.329358100891113 ], [ "▁construire", -12.329511642456055 ], [ "▁Leonard", -12.329596519470215 ], [ "ediglich", -12.329630851745605 ], [ "ubbed", -12.3297758102417 ], [ "NK", -12.329792022705078 ], [ "shell", -12.329912185668945 ], [ "▁persönliche", -12.329996109008789 ], [ "ecuring", -12.329998970031738 ], [ "beaten", -12.33000373840332 ], [ "ALE", -12.330053329467773 ], [ "▁puppy", -12.33023452758789 ], [ "▁capac", -12.33027458190918 ], [ "▁seventh", -12.330394744873047 ], [ "▁nursery", -12.330400466918945 ], [ "▁Rum", -12.330419540405273 ], [ "▁exquisite", -12.330423355102539 ], [ "▁Legi", -12.330483436584473 ], [ "▁persist", -12.330497741699219 ], [ "bacterial", -12.330548286437988 ], [ "▁cereal", -12.330572128295898 ], [ "▁principe", -12.330693244934082 ], [ "chip", -12.330766677856445 ], [ "rush", -12.330832481384277 ], [ "▁funnel", -12.330904006958008 ], [ "▁calitatea", -12.331024169921875 ], [ "ibă", -12.33104419708252 ], [ "▁reign", -12.331086158752441 ], [ "▁congregation", -12.331120491027832 ], [ "▁obtine", -12.331270217895508 ], [ "▁découverte", -12.331286430358887 ], [ "▁gama", -12.331315040588379 ], [ "▁judec", -12.33132553100586 ], [ "Plan", -12.331351280212402 ], [ "▁gesture", -12.331539154052734 ], [ "öffentlichen", -12.331644058227539 ], [ "▁imported", -12.331693649291992 ], [ "▁rotate", -12.331747055053711 ], [ "blown", -12.331756591796875 ], [ "▁Protein", -12.331827163696289 ], [ "parfaitement", -12.331832885742188 ], [ "ondo", -12.331868171691895 ], [ "ologists", -12.331890106201172 ], [ "▁neighborhoods", -12.331989288330078 ], [ "▁Pope", -12.33202075958252 ], [ "▁museums", -12.332194328308105 ], [ "▁porter", -12.332330703735352 ], [ "▁kiss", -12.332335472106934 ], [ "pdf", -12.332354545593262 
], [ "sided", -12.332359313964844 ], [ "▁gern", -12.332395553588867 ], [ "bedingungen", -12.332496643066406 ], [ "▁Ride", -12.332582473754883 ], [ "Apoi", -12.332584381103516 ], [ "▁bestehen", -12.332603454589844 ], [ "5\"", -12.33285903930664 ], [ "bob", -12.332862854003906 ], [ "ficient", -12.33303165435791 ], [ "premise", -12.333086967468262 ], [ "▁Clip", -12.333112716674805 ], [ "▁concours", -12.333213806152344 ], [ "olar", -12.333281517028809 ], [ "▁Centr", -12.333356857299805 ], [ "outlined", -12.333429336547852 ], [ "▁observa", -12.333511352539062 ], [ "▁negotiate", -12.333537101745605 ], [ "▁Partnership", -12.33358383178711 ], [ "clock", -12.333662033081055 ], [ "roasted", -12.333755493164062 ], [ "Pourquoi", -12.33391284942627 ], [ "▁Marshall", -12.334005355834961 ], [ "▁Gerade", -12.334052085876465 ], [ "▁pachet", -12.334160804748535 ], [ "▁preliminary", -12.334162712097168 ], [ "▁tragic", -12.334200859069824 ], [ "author", -12.334268569946289 ], [ "▁Gov", -12.334309577941895 ], [ "▁comunic", -12.334403991699219 ], [ "▁coordinator", -12.334410667419434 ], [ "YA", -12.33445930480957 ], [ "▁Steam", -12.33476734161377 ], [ "▁Nag", -12.334796905517578 ], [ "▁Kara", -12.334851264953613 ], [ "▁Gang", -12.334858894348145 ], [ "aurez", -12.334868431091309 ], [ "▁horrible", -12.334869384765625 ], [ "▁Luxury", -12.335076332092285 ], [ "▁encouragement", -12.335169792175293 ], [ "▁conceptual", -12.335250854492188 ], [ "▁constituent", -12.335431098937988 ], [ "nvelop", -12.335494041442871 ], [ "ucc", -12.335500717163086 ], [ "▁conçu", -12.335542678833008 ], [ "pfel", -12.33559513092041 ], [ "special", -12.335700988769531 ], [ "▁Growth", -12.335834503173828 ], [ "cada", -12.335916519165039 ], [ "▁oamenilor", -12.335976600646973 ], [ "▁vendredi", -12.336021423339844 ], [ "▁coupe", -12.336055755615234 ], [ "▁Danke", -12.336134910583496 ], [ "reflects", -12.336181640625 ], [ "▁girlfriend", -12.336273193359375 ], [ "▁diffuse", -12.336325645446777 ], [ "HER", -12.336328506469727 ], [ "storing", -12.336464881896973 ], [ "ailing", -12.336591720581055 ], [ "▁Desi", -12.336601257324219 ], [ "stitution", -12.336832046508789 ], [ "▁adun", -12.336844444274902 ], [ "▁Partie", -12.336869239807129 ], [ "▁tissues", -12.336958885192871 ], [ "▁discovering", -12.337154388427734 ], [ "Jacques", -12.337178230285645 ], [ "lungs", -12.33724594116211 ], [ "▁Handy", -12.337261199951172 ], [ "centric", -12.337285995483398 ], [ "slav", -12.337442398071289 ], [ "▁sights", -12.337560653686523 ], [ "▁Category", -12.337644577026367 ], [ "▁Einrichtung", -12.337957382202148 ], [ "▁Robinson", -12.33804702758789 ], [ "▁Terra", -12.338150978088379 ], [ "▁creep", -12.338167190551758 ], [ "▁Lob", -12.338184356689453 ], [ "001", -12.33820629119873 ], [ "kop", -12.338208198547363 ], [ "Emb", -12.338292121887207 ], [ "▁forgive", -12.338391304016113 ], [ "▁icons", -12.33847427368164 ], [ "electric", -12.3385009765625 ], [ "▁faucet", -12.338516235351562 ], [ "▁invisible", -12.3386812210083 ], [ "sprach", -12.338801383972168 ], [ "▁beachten", -12.33881664276123 ], [ "rahm", -12.338833808898926 ], [ "▁Teacher", -12.338919639587402 ], [ "Fab", -12.339070320129395 ], [ "▁joue", -12.339101791381836 ], [ "▁Popular", -12.339120864868164 ], [ "▁Februar", -12.339171409606934 ], [ "sound", -12.339251518249512 ], [ "▁(0", -12.339317321777344 ], [ "▁Compare", -12.33938980102539 ], [ "▁pads", -12.339455604553223 ], [ "270", -12.339498519897461 ], [ "ousse", -12.339548110961914 ], [ "▁UAE", -12.339786529541016 ], [ "izări", -12.339787483215332 ], [ 
"▁bonuses", -12.33993911743164 ], [ "▁switches", -12.3400239944458 ], [ "▁Brothers", -12.340166091918945 ], [ "▁environmentally", -12.340171813964844 ], [ "vista", -12.340264320373535 ], [ "▁intentions", -12.3402738571167 ], [ "▁Terri", -12.340301513671875 ], [ "▁diabet", -12.34030532836914 ], [ "▁prese", -12.340333938598633 ], [ "▁parcurs", -12.340389251708984 ], [ "Warum", -12.340449333190918 ], [ "▁credentials", -12.340455055236816 ], [ "▁PLA", -12.34046459197998 ], [ "▁instruct", -12.340470314025879 ], [ "▁benefic", -12.340633392333984 ], [ "write", -12.340675354003906 ], [ "▁poids", -12.340773582458496 ], [ "▁Anspruch", -12.340923309326172 ], [ "▁avocado", -12.340923309326172 ], [ "▁inevitable", -12.340923309326172 ], [ "▁poorly", -12.340950965881348 ], [ "karte", -12.340994834899902 ], [ "▁Publishing", -12.340999603271484 ], [ "odată", -12.341140747070312 ], [ "▁scientifique", -12.341157913208008 ], [ "▁lăsa", -12.341262817382812 ], [ "▁secol", -12.34131908416748 ], [ "▁nevertheless", -12.341392517089844 ], [ "SAT", -12.341597557067871 ], [ "280", -12.341651916503906 ], [ "▁prevederi", -12.341670989990234 ], [ "▁chrome", -12.342002868652344 ], [ "institut", -12.342267036437988 ], [ "richtigen", -12.34228515625 ], [ "▁grief", -12.342338562011719 ], [ "▁penalties", -12.342373847961426 ], [ "▁Bayern", -12.34238052368164 ], [ "▁caramel", -12.342473983764648 ], [ "Now", -12.342495918273926 ], [ "Stiftung", -12.342576026916504 ], [ "country", -12.342737197875977 ], [ "dication", -12.34278678894043 ], [ "▁Chor", -12.342801094055176 ], [ "▁rămâne", -12.342936515808105 ], [ "▁TOP", -12.34300708770752 ], [ "▁complète", -12.34301471710205 ], [ "▁Marian", -12.34302806854248 ], [ "▁Avant", -12.343121528625488 ], [ "▁Shower", -12.343156814575195 ], [ "treu", -12.34316349029541 ], [ "▁chop", -12.34321403503418 ], [ "▁comfortably", -12.343220710754395 ], [ "▁autism", -12.34323787689209 ], [ "▁Sind", -12.34328556060791 ], [ "▁(20", -12.343340873718262 ], [ "▁Cinema", -12.343414306640625 ], [ "compania", -12.343606948852539 ], [ "▁Lex", -12.343622207641602 ], [ "▁Sofa", -12.343716621398926 ], [ "dru", -12.343753814697266 ], [ "▁verification", -12.343770027160645 ], [ "▁Immer", -12.343825340270996 ], [ "lomb", -12.343829154968262 ], [ "meric", -12.34385871887207 ], [ "▁slower", -12.34398365020752 ], [ "▁propag", -12.344090461730957 ], [ "Inter", -12.344097137451172 ], [ "selling", -12.34418773651123 ], [ "▁Bright", -12.344269752502441 ], [ "condition", -12.344280242919922 ], [ "PDF", -12.344291687011719 ], [ "oyez", -12.344391822814941 ], [ "▁Fried", -12.344420433044434 ], [ "▁Nazi", -12.34443187713623 ], [ "▁Buffalo", -12.344447135925293 ], [ "▁Sue", -12.344449043273926 ], [ "▁Rhein", -12.34468936920166 ], [ "▁Klaus", -12.344889640808105 ], [ "▁indiqu", -12.344963073730469 ], [ "echte", -12.344996452331543 ], [ "▁frecvent", -12.345165252685547 ], [ "▁conveniently", -12.345187187194824 ], [ "▁Moi", -12.345197677612305 ], [ "▁greenhouse", -12.345220565795898 ], [ "▁rédui", -12.34524154663086 ], [ "▁lengthy", -12.34542179107666 ], [ "verband", -12.345534324645996 ], [ "inţă", -12.345622062683105 ], [ "▁rigorous", -12.345625877380371 ], [ "▁Finish", -12.34580135345459 ], [ "▁FBI", -12.346052169799805 ], [ "cultura", -12.346083641052246 ], [ "▁compartment", -12.346110343933105 ], [ "▁pretend", -12.346117973327637 ], [ "▁assembled", -12.346212387084961 ], [ "▁Nie", -12.34639835357666 ], [ "fession", -12.34640884399414 ], [ "▁£2", -12.34642219543457 ], [ "algré", -12.3468017578125 ], [ "▁anterior", 
-12.346817970275879 ], [ "▁Wissenschaft", -12.34683609008789 ], [ "▁Harbor", -12.346923828125 ], [ "lix", -12.346985816955566 ], [ "=\"", -12.347049713134766 ], [ "▁breathtaking", -12.34705638885498 ], [ "▁Stern", -12.34708309173584 ], [ "▁Internetseite", -12.347132682800293 ], [ "▁locker", -12.347216606140137 ], [ "▁feather", -12.34726619720459 ], [ "Serv", -12.347297668457031 ], [ "▁snake", -12.347332000732422 ], [ "▁Border", -12.347396850585938 ], [ "▁undergo", -12.347518920898438 ], [ "▁petrol", -12.347558975219727 ], [ "▁dealership", -12.3475923538208 ], [ "▁commander", -12.347596168518066 ], [ "▁Monate", -12.347599983215332 ], [ "▁Guardian", -12.347665786743164 ], [ "▁Todd", -12.347774505615234 ], [ "Ann", -12.347825050354004 ], [ "ibilité", -12.347918510437012 ], [ "▁Quarter", -12.347987174987793 ], [ "▁portray", -12.348097801208496 ], [ "▁Tai", -12.34813404083252 ], [ "▁strikes", -12.348224639892578 ], [ "illage", -12.348381042480469 ], [ "▁IRS", -12.348417282104492 ], [ "▁lupta", -12.348455429077148 ], [ "▁Sper", -12.348493576049805 ], [ "PRO", -12.348530769348145 ], [ "▁Export", -12.348549842834473 ], [ "▁crypto", -12.348587989807129 ], [ "▁barbecue", -12.348692893981934 ], [ "▁portions", -12.348787307739258 ], [ "▁explicit", -12.348793983459473 ], [ "▁angenehm", -12.348834037780762 ], [ "▁marathon", -12.348946571350098 ], [ "▁apartament", -12.348982810974121 ], [ "▁Eva", -12.349079132080078 ], [ "plate", -12.349181175231934 ], [ "viel", -12.34925365447998 ], [ "FIN", -12.34926986694336 ], [ "dependent", -12.34935188293457 ], [ "▁cercet", -12.34942626953125 ], [ "▁midnight", -12.349499702453613 ], [ "copie", -12.349563598632812 ], [ "▁companii", -12.349621772766113 ], [ "▁tenu", -12.349660873413086 ], [ "1/2", -12.349662780761719 ], [ "2.4", -12.349693298339844 ], [ "abri", -12.349699974060059 ], [ "▁warn", -12.34980297088623 ], [ "▁luggage", -12.349875450134277 ], [ "numarul", -12.349968910217285 ], [ "▁contour", -12.350014686584473 ], [ "▁Ghost", -12.350016593933105 ], [ "Angaben", -12.35012435913086 ], [ "▁unemployment", -12.350296020507812 ], [ "▁rău", -12.350380897521973 ], [ "▁dispatch", -12.350445747375488 ], [ "investissement", -12.350547790527344 ], [ "▁passt", -12.35057258605957 ], [ "▁Germania", -12.350578308105469 ], [ "▁webpage", -12.350651741027832 ], [ "▁reservations", -12.350688934326172 ], [ "▁Kai", -12.350743293762207 ], [ "▁Cav", -12.350890159606934 ], [ "▁Patient", -12.351109504699707 ], [ "ер", -12.351213455200195 ], [ "▁Belle", -12.351236343383789 ], [ "▁Nashville", -12.351296424865723 ], [ "▁Talent", -12.351332664489746 ], [ "ouvrage", -12.351364135742188 ], [ "▁bekommt", -12.351365089416504 ], [ "USA", -12.351430892944336 ], [ "CES", -12.351432800292969 ], [ "▁Peru", -12.351499557495117 ], [ "▁erkennen", -12.35153579711914 ], [ "prinde", -12.351569175720215 ], [ "▁constitution", -12.351922035217285 ], [ "itatile", -12.351998329162598 ], [ "bah", -12.352147102355957 ], [ "▁avail", -12.352148056030273 ], [ "▁disponibile", -12.352149963378906 ], [ "hér", -12.352258682250977 ], [ "ол", -12.352411270141602 ], [ "▁startups", -12.352435111999512 ], [ "▁carton", -12.352485656738281 ], [ "▁Newsletter", -12.35251235961914 ], [ "éti", -12.352560997009277 ], [ "▁investigating", -12.352779388427734 ], [ "itul", -12.352925300598145 ], [ "touch", -12.352962493896484 ], [ "Sport", -12.353137016296387 ], [ "AME", -12.353203773498535 ], [ "MIN", -12.353222846984863 ], [ "metry", -12.353371620178223 ], [ "icy", -12.353492736816406 ], [ "▁Luna", -12.35351848602295 ], [ 
"▁asthma", -12.353614807128906 ], [ "▁conduc", -12.35365104675293 ], [ "▁Ari", -12.35369873046875 ], [ "trust", -12.353832244873047 ], [ "▁defines", -12.353894233703613 ], [ "▁Blend", -12.353927612304688 ], [ "azo", -12.353989601135254 ], [ "▁sweep", -12.354169845581055 ], [ "lope", -12.354331016540527 ], [ "ţinut", -12.35439682006836 ], [ "WD", -12.354503631591797 ], [ "▁appetite", -12.354619979858398 ], [ "▁Seed", -12.354753494262695 ], [ "Friend", -12.354854583740234 ], [ "▁repet", -12.354876518249512 ], [ "▁throat", -12.354936599731445 ], [ "philosoph", -12.355141639709473 ], [ "▁connaître", -12.355156898498535 ], [ "▁Counter", -12.355299949645996 ], [ "▁Anforderungen", -12.35533332824707 ], [ "▁Polit", -12.355363845825195 ], [ "▁Weather", -12.3554048538208 ], [ "bow", -12.355423927307129 ], [ "▁recreation", -12.355484008789062 ], [ "▁culinary", -12.355571746826172 ], [ "▁plage", -12.355609893798828 ], [ "▁Cruz", -12.355659484863281 ], [ "▁equip", -12.355668067932129 ], [ "▁Recent", -12.355697631835938 ], [ "LED", -12.355767250061035 ], [ "▁steak", -12.355772972106934 ], [ "▁belly", -12.355880737304688 ], [ "photo", -12.356130599975586 ], [ "▁lakes", -12.35623836517334 ], [ "▁intact", -12.356287956237793 ], [ "▁spiral", -12.356386184692383 ], [ "▁Billy", -12.356468200683594 ], [ "▁Understanding", -12.356534957885742 ], [ "▁Lay", -12.356558799743652 ], [ "▁roster", -12.356632232666016 ], [ "▁admire", -12.356647491455078 ], [ "▁android", -12.356732368469238 ], [ "▁technician", -12.356734275817871 ], [ "gène", -12.356818199157715 ], [ "motiv", -12.356954574584961 ], [ "▁Boat", -12.356988906860352 ], [ "▁genießen", -12.357000350952148 ], [ "▁Geschmack", -12.357001304626465 ], [ "▁heroes", -12.3570556640625 ], [ "▁1800", -12.357137680053711 ], [ "numeroase", -12.35776138305664 ], [ "▁anschließend", -12.357802391052246 ], [ "▁Spur", -12.357813835144043 ], [ "▁clarify", -12.35784912109375 ], [ "▁warmer", -12.357889175415039 ], [ "▁Ranch", -12.357955932617188 ], [ "▁simti", -12.358024597167969 ], [ "Thank", -12.35838508605957 ], [ "▁freight", -12.358434677124023 ], [ "▁administrators", -12.358453750610352 ], [ "Reg", -12.358588218688965 ], [ "Această", -12.358670234680176 ], [ "▁legume", -12.358741760253906 ], [ "▁utilizare", -12.358786582946777 ], [ "CON", -12.358904838562012 ], [ "urgi", -12.358917236328125 ], [ "▁Gesicht", -12.358920097351074 ], [ "▁counselor", -12.358954429626465 ], [ "▁mondiale", -12.359009742736816 ], [ "helm", -12.359137535095215 ], [ "▁Promo", -12.359156608581543 ], [ "▁Schweiz", -12.35917854309082 ], [ "Ich", -12.35929012298584 ], [ "▁intalni", -12.359295845031738 ], [ "▁Bloom", -12.359318733215332 ], [ "▁Score", -12.359362602233887 ], [ "▁Fruit", -12.35944652557373 ], [ "▁constraints", -12.359447479248047 ], [ "▁farmer", -12.359745979309082 ], [ "▁précise", -12.359807014465332 ], [ "evaluating", -12.359868049621582 ], [ "▁Period", -12.359891891479492 ], [ "byte", -12.359893798828125 ], [ "wah", -12.360025405883789 ], [ "Mac", -12.360123634338379 ], [ "iron", -12.360197067260742 ], [ "′", -12.360337257385254 ], [ "▁tehnic", -12.360539436340332 ], [ "▁legat", -12.36054515838623 ], [ "▁Pilot", -12.360574722290039 ], [ "▁Carpet", -12.36064624786377 ], [ "TEN", -12.360812187194824 ], [ "▁shareholders", -12.36082649230957 ], [ "vină", -12.360880851745605 ], [ "▁parole", -12.360939979553223 ], [ "ătă", -12.360984802246094 ], [ "bbing", -12.361000061035156 ], [ "▁switched", -12.361002922058105 ], [ "▁Petro", -12.361010551452637 ], [ "▁Vertrags", -12.36111831665039 ], [ 
"cham", -12.361178398132324 ], [ "wang", -12.361284255981445 ], [ "▁Bean", -12.36139965057373 ], [ "minister", -12.361442565917969 ], [ "▁Wu", -12.361522674560547 ], [ "▁Olympics", -12.361539840698242 ], [ "tipul", -12.361542701721191 ], [ "▁Citi", -12.36166763305664 ], [ "▁Fold", -12.361873626708984 ], [ "▁Partei", -12.361940383911133 ], [ "▁centrale", -12.361984252929688 ], [ "île", -12.362032890319824 ], [ "pflicht", -12.362175941467285 ], [ "heli", -12.362398147583008 ], [ "▁erwartet", -12.362414360046387 ], [ "▁oferta", -12.362458229064941 ], [ "▁NHS", -12.36246395111084 ], [ "annon", -12.362570762634277 ], [ "▁Rud", -12.362701416015625 ], [ "▁Stuttgart", -12.362737655639648 ], [ "▁rămas", -12.362746238708496 ], [ "▁eliminated", -12.36275577545166 ], [ "▁hiding", -12.362797737121582 ], [ "▁cadeau", -12.362832069396973 ], [ "▁mock", -12.363115310668945 ], [ "▁elder", -12.363333702087402 ], [ "▁Liz", -12.363364219665527 ], [ "aji", -12.363544464111328 ], [ "▁endlich", -12.363653182983398 ], [ "sufficient", -12.363668441772461 ], [ "▁zusätzliche", -12.363712310791016 ], [ "scient", -12.363757133483887 ], [ "▁Adjust", -12.363883972167969 ], [ "▁incentive", -12.363945007324219 ], [ "▁Papa", -12.364012718200684 ], [ "▁Pharma", -12.364041328430176 ], [ "▁conflicts", -12.364107131958008 ], [ "zählen", -12.364113807678223 ], [ "▁chien", -12.364118576049805 ], [ "KB", -12.36413288116455 ], [ "ultimi", -12.364188194274902 ], [ "▁Jul", -12.36421012878418 ], [ "▁Male", -12.36422061920166 ], [ "▁viewer", -12.36427116394043 ], [ "▁Sector", -12.364328384399414 ], [ "▁REAL", -12.364344596862793 ], [ "▁arbitr", -12.36436939239502 ], [ "resistant", -12.364399909973145 ], [ "▁Bristol", -12.364423751831055 ], [ "▁shy", -12.364540100097656 ], [ "SW", -12.364593505859375 ], [ "▁Kirk", -12.36460018157959 ], [ "centrul", -12.364653587341309 ], [ "▁Venezuela", -12.364657402038574 ], [ "▁communicating", -12.364657402038574 ], [ "▁Chemical", -12.364663124084473 ], [ "▁surprises", -12.364843368530273 ], [ "▁Jamie", -12.364933967590332 ], [ "▁Heavy", -12.364965438842773 ], [ "▁turnover", -12.36498737335205 ], [ "▁étudiants", -12.365114212036133 ], [ "welcher", -12.365124702453613 ], [ "▁preturi", -12.365200996398926 ], [ "▁Mono", -12.365283966064453 ], [ "▁paddle", -12.365309715270996 ], [ "▁accountability", -12.365364074707031 ], [ "OUS", -12.365592956542969 ], [ "▁marketers", -12.365762710571289 ], [ "fection", -12.365900993347168 ], [ "▁Outside", -12.365921020507812 ], [ "▁Jefferson", -12.366114616394043 ], [ "oaie", -12.36617660522461 ], [ "tenue", -12.366275787353516 ], [ "HU", -12.366329193115234 ], [ "Très", -12.36639404296875 ], [ "valoarea", -12.36642837524414 ], [ "103", -12.366482734680176 ], [ "▁Privacy", -12.366580963134766 ], [ "▁Leistungen", -12.366598129272461 ], [ "(3)", -12.36662483215332 ], [ "▁études", -12.366734504699707 ], [ "sko", -12.366750717163086 ], [ "drum", -12.366822242736816 ], [ "▁lamb", -12.366842269897461 ], [ "▁nicio", -12.367094993591309 ], [ "▁NATO", -12.367104530334473 ], [ "▁Freitag", -12.367178916931152 ], [ "▁precedent", -12.367178916931152 ], [ "▁partenaires", -12.367202758789062 ], [ "▁companiei", -12.367234230041504 ], [ "▁Plaza", -12.367249488830566 ], [ "▁disruption", -12.367274284362793 ], [ "▁violations", -12.367338180541992 ], [ "▁Reference", -12.367446899414062 ], [ "▁habitants", -12.36770248413086 ], [ "▁compost", -12.36776351928711 ], [ "▁citoyen", -12.367785453796387 ], [ "▁Historical", -12.367857933044434 ], [ "vollen", -12.36793327331543 ], [ "▁Eck", 
-12.36815357208252 ], [ "▁lumii", -12.368180274963379 ], [ "▁reusit", -12.368278503417969 ], [ "genic", -12.368307113647461 ], [ "Why", -12.368436813354492 ], [ "ASE", -12.368474006652832 ], [ "▁athlete", -12.36854076385498 ], [ "▁Spitze", -12.368559837341309 ], [ "▁schimbat", -12.368566513061523 ], [ "▁anonymous", -12.368850708007812 ], [ "jedes", -12.368856430053711 ], [ "exclu", -12.368874549865723 ], [ "factor", -12.369199752807617 ], [ "▁Dezember", -12.369231224060059 ], [ "▁scientist", -12.369373321533203 ], [ "▁likelihood", -12.36947250366211 ], [ "▁Rhode", -12.369488716125488 ], [ "▁Balance", -12.369521141052246 ], [ "istoria", -12.36959457397461 ], [ "▁Neil", -12.369780540466309 ], [ "▁bush", -12.369919776916504 ], [ "▁Ergebnisse", -12.369935989379883 ], [ "▁Sinn", -12.369956016540527 ], [ "▁spezielle", -12.370128631591797 ], [ "▁jucat", -12.37015438079834 ], [ "▁spite", -12.370179176330566 ], [ "▁Ultimate", -12.370365142822266 ], [ "▁fructe", -12.370401382446289 ], [ "▁asleep", -12.370441436767578 ], [ "▁Goal", -12.370539665222168 ], [ "▁PAR", -12.370631217956543 ], [ "▁rows", -12.370705604553223 ], [ "▁Fol", -12.3709135055542 ], [ "▁durata", -12.370945930480957 ], [ "▁traditionnel", -12.37100887298584 ], [ "▁tema", -12.37122917175293 ], [ "▁crédit", -12.371232986450195 ], [ "smallest", -12.371358871459961 ], [ "▁amino", -12.371358871459961 ], [ "▁elephant", -12.371405601501465 ], [ "▁tubes", -12.371685028076172 ], [ "▁Verwendung", -12.371719360351562 ], [ "▁Excellence", -12.371889114379883 ], [ "▁utilities", -12.371962547302246 ], [ "frau", -12.372111320495605 ], [ "▁poze", -12.3721342086792 ], [ "août", -12.372307777404785 ], [ "ango", -12.372514724731445 ], [ "give", -12.372532844543457 ], [ "▁appelé", -12.372576713562012 ], [ "▁yeast", -12.372671127319336 ], [ "▁enrollment", -12.372676849365234 ], [ "organiz", -12.3727445602417 ], [ "▁asociat", -12.372753143310547 ], [ "▁cattle", -12.372772216796875 ], [ "▁Solution", -12.372798919677734 ], [ "evoke", -12.372807502746582 ], [ "▁Hampshire", -12.372857093811035 ], [ "▁yeah", -12.372878074645996 ], [ "▁Argentina", -12.372928619384766 ], [ "▁abnormal", -12.373022079467773 ], [ "▁Heights", -12.373082160949707 ], [ "▁Mitchell", -12.373099327087402 ], [ "▁Quad", -12.373350143432617 ], [ "▁textures", -12.373382568359375 ], [ "▁coalition", -12.373384475708008 ], [ "▁dataset", -12.37338924407959 ], [ "World", -12.373438835144043 ], [ "ständ", -12.373456001281738 ], [ "▁groove", -12.373476028442383 ], [ "▁emotionally", -12.373562812805176 ], [ "▁preciz", -12.373636245727539 ], [ "kte", -12.373741149902344 ], [ "berechtigt", -12.373828887939453 ], [ "▁1971", -12.373888969421387 ], [ "grandes", -12.373907089233398 ], [ "▁Broadway", -12.37391185760498 ], [ "▁comunicat", -12.373994827270508 ], [ "nui", -12.37402629852295 ], [ "GER", -12.374079704284668 ], [ "pick", -12.374125480651855 ], [ "inscrit", -12.37414264678955 ], [ "▁Gross", -12.374258995056152 ], [ "▁McDonald", -12.374310493469238 ], [ "▁Zero", -12.374330520629883 ], [ "▁Halb", -12.374341011047363 ], [ "▁caractère", -12.374553680419922 ], [ "▁doctrine", -12.374553680419922 ], [ "▁Sinne", -12.37458610534668 ], [ "MLS", -12.374594688415527 ], [ "▁réel", -12.374759674072266 ], [ "▁Ful", -12.37476921081543 ], [ "limiting", -12.37483024597168 ], [ "▁Gan", -12.374870300292969 ], [ "▁exclude", -12.37490463256836 ], [ "imba", -12.374974250793457 ], [ "rolul", -12.374991416931152 ], [ "▁veggies", -12.375059127807617 ], [ "▁fasci", -12.375092506408691 ], [ "▁oval", -12.375173568725586 ], [ 
"▁contacter", -12.375221252441406 ], [ "▁linking", -12.375279426574707 ], [ "▁knit", -12.375308990478516 ], [ "▁enroll", -12.375504493713379 ], [ "▁dédié", -12.375533103942871 ], [ "▁renting", -12.375541687011719 ], [ "▁genera", -12.37567138671875 ], [ "citing", -12.375691413879395 ], [ "▁bend", -12.375700950622559 ], [ "guin", -12.375752449035645 ], [ "▁caregiver", -12.375768661499023 ], [ "▁könnt", -12.375791549682617 ], [ "▁Scripture", -12.375795364379883 ], [ "▁Mic", -12.375899314880371 ], [ "▁Denmark", -12.37590217590332 ], [ "▁qualifying", -12.375917434692383 ], [ "▁costumes", -12.375958442687988 ], [ "▁dwelling", -12.37601375579834 ], [ "▁recrut", -12.376099586486816 ], [ "▁bedding", -12.37618637084961 ], [ "gesprochen", -12.376253128051758 ], [ "▁editors", -12.376386642456055 ], [ "/12", -12.37657642364502 ], [ "▁cumparat", -12.376583099365234 ], [ "fiction", -12.376730918884277 ], [ "▁spinal", -12.376740455627441 ], [ "▁pathway", -12.376799583435059 ], [ "▁vârst", -12.37683391571045 ], [ "mba", -12.376874923706055 ], [ "▁enthusiastic", -12.37692642211914 ], [ "▁Watt", -12.37697982788086 ], [ "symptom", -12.376992225646973 ], [ "▁pup", -12.37712287902832 ], [ "▁glorious", -12.377225875854492 ], [ "▁fața", -12.377228736877441 ], [ "▁prohibited", -12.377256393432617 ], [ "vergleich", -12.377286911010742 ], [ "▁suspected", -12.377334594726562 ], [ "▁Railway", -12.377381324768066 ], [ "▁Aujourd", -12.377469062805176 ], [ "▁Patients", -12.377476692199707 ], [ "▁séance", -12.377501487731934 ], [ "▁contraire", -12.377503395080566 ], [ "▁cuvânt", -12.37771224975586 ], [ "▁trotzdem", -12.37773609161377 ], [ "émission", -12.377795219421387 ], [ "▁bore", -12.37782096862793 ], [ "▁safeguard", -12.377851486206055 ], [ "▁galleries", -12.37820053100586 ], [ "cron", -12.378268241882324 ], [ "▁Rica", -12.378335952758789 ], [ "fläche", -12.37839126586914 ], [ "▁Slow", -12.37842082977295 ], [ "▁vara", -12.378549575805664 ], [ "▁Swan", -12.378564834594727 ], [ "▁compounds", -12.378564834594727 ], [ "▁Slo", -12.378621101379395 ], [ "▁accommodations", -12.378621101379395 ], [ "▁Putin", -12.378708839416504 ], [ "▁undertaken", -12.378767967224121 ], [ "▁prépar", -12.37879467010498 ], [ "▁gandi", -12.37881088256836 ], [ "sediul", -12.378924369812012 ], [ "▁Nathan", -12.379143714904785 ], [ "▁fountain", -12.379173278808594 ], [ "▁mère", -12.379194259643555 ], [ "fatty", -12.379201889038086 ], [ "▁concentrated", -12.379241943359375 ], [ "richtung", -12.379300117492676 ], [ "▁appropriately", -12.37955379486084 ], [ "107", -12.379631996154785 ], [ "▁shark", -12.379735946655273 ], [ "▁Topic", -12.379867553710938 ], [ "▁Ausstellung", -12.379880905151367 ], [ "▁SUA", -12.380267143249512 ], [ "SER", -12.380359649658203 ], [ "▁Nicole", -12.38039779663086 ], [ "▁utilisateurs", -12.380620956420898 ], [ "▁Brazilian", -12.380753517150879 ], [ "▁continut", -12.380865097045898 ], [ "▁sanatate", -12.380881309509277 ], [ "faudra", -12.380882263183594 ], [ "nahm", -12.380938529968262 ], [ "▁Specific", -12.381153106689453 ], [ "aiba", -12.381199836730957 ], [ "cepând", -12.381296157836914 ], [ "▁Beer", -12.381366729736328 ], [ "roni", -12.381616592407227 ], [ "kay", -12.381636619567871 ], [ "▁gravity", -12.381844520568848 ], [ "▁verfügt", -12.381856918334961 ], [ "7:30", -12.381878852844238 ], [ "▁Players", -12.381945610046387 ], [ "▁Industries", -12.38198184967041 ], [ "punkte", -12.382119178771973 ], [ "▁yacht", -12.382135391235352 ], [ "-04", -12.382149696350098 ], [ "onné", -12.382192611694336 ], [ "▁Cards", 
-12.382221221923828 ], [ "▁fete", -12.382420539855957 ], [ "breaking", -12.38257884979248 ], [ "baum", -12.382621765136719 ], [ "nada", -12.382651329040527 ], [ "▁geplant", -12.382750511169434 ], [ "genuinely", -12.382766723632812 ], [ "talk", -12.382871627807617 ], [ "▁disadvantage", -12.382920265197754 ], [ "▁shutter", -12.383003234863281 ], [ "virus", -12.38302230834961 ], [ "▁cricket", -12.38308048248291 ], [ "▁comenzi", -12.383102416992188 ], [ "hier", -12.383170127868652 ], [ "▁aufzu", -12.383198738098145 ], [ "▁Rez", -12.38321304321289 ], [ "▁conclusions", -12.383329391479492 ], [ "▁Wang", -12.383509635925293 ], [ "Darüber", -12.383524894714355 ], [ "▁CSS", -12.383573532104492 ], [ "CW", -12.383780479431152 ], [ "▁Chr", -12.383790969848633 ], [ "▁traded", -12.383843421936035 ], [ "▁Schon", -12.384265899658203 ], [ "mped", -12.38429069519043 ], [ "▁alloy", -12.384385108947754 ], [ "AVE", -12.38451099395752 ], [ "▁imagery", -12.384542465209961 ], [ "▁resurse", -12.38479995727539 ], [ "▁Thunder", -12.384834289550781 ], [ "▁schimbare", -12.384860038757324 ], [ "▁Youtube", -12.38499927520752 ], [ "▁Monster", -12.385189056396484 ], [ "phil", -12.385234832763672 ], [ "▁bébé", -12.385284423828125 ], [ "Creating", -12.385428428649902 ], [ "ănă", -12.385466575622559 ], [ "▁Staat", -12.385504722595215 ], [ "adică", -12.385531425476074 ], [ "▁boyfriend", -12.385552406311035 ], [ "▁Winner", -12.385594367980957 ], [ "▁disputes", -12.385653495788574 ], [ "▁lush", -12.3856840133667 ], [ "▁CMS", -12.385719299316406 ], [ "▁locaux", -12.385725021362305 ], [ "▁Verfahren", -12.38576889038086 ], [ "▁Café", -12.385786056518555 ], [ "▁Vorstand", -12.385870933532715 ], [ "▁lucrat", -12.385960578918457 ], [ "▁Root", -12.38602352142334 ], [ "▁decis", -12.386059761047363 ], [ "▁Shadow", -12.386062622070312 ], [ "▁countryside", -12.386067390441895 ], [ "▁analiza", -12.386114120483398 ], [ "obos", -12.38616943359375 ], [ "opera", -12.386175155639648 ], [ "actu", -12.386207580566406 ], [ "▁Songs", -12.3864164352417 ], [ "reifen", -12.38648509979248 ], [ "▁hilft", -12.386650085449219 ], [ "region", -12.386727333068848 ], [ "▁categoria", -12.387001991271973 ], [ "capturing", -12.38701343536377 ], [ "▁1967", -12.387025833129883 ], [ "▁optimized", -12.387032508850098 ], [ "▁Dim", -12.387353897094727 ], [ "▁adapté", -12.387447357177734 ], [ "zeichnet", -12.387524604797363 ], [ "▁strada", -12.387625694274902 ], [ "fulness", -12.38774585723877 ], [ "▁technically", -12.38774585723877 ], [ "▁marker", -12.387757301330566 ], [ "▁vizita", -12.387808799743652 ], [ "▁imperative", -12.387986183166504 ], [ "▁pensé", -12.38802719116211 ], [ "▁drilling", -12.388030052185059 ], [ "ISA", -12.38818073272705 ], [ "▁Massage", -12.388201713562012 ], [ "▁Terry", -12.388238906860352 ], [ "▁pourtant", -12.38835334777832 ], [ "▁declaration", -12.388440132141113 ], [ "▁instructors", -12.388453483581543 ], [ "Eventually", -12.38847827911377 ], [ "▁banned", -12.38847827911377 ], [ "MAT", -12.388520240783691 ], [ "▁medici", -12.38856315612793 ], [ "▁Warm", -12.388615608215332 ], [ "▁trec", -12.388731002807617 ], [ "▁ecran", -12.388763427734375 ], [ "▁goat", -12.388838768005371 ], [ "▁manipulation", -12.388850212097168 ], [ "▁mayor", -12.388898849487305 ], [ "▁unterwegs", -12.388975143432617 ], [ "▁journals", -12.3890380859375 ], [ "▁hedge", -12.389239311218262 ], [ "Merc", -12.389300346374512 ], [ "▁joueurs", -12.389411926269531 ], [ "▁Religion", -12.3894624710083 ], [ "▁Mountains", -12.389477729797363 ], [ "▁renewed", -12.389497756958008 ], [ 
"▁Limit", -12.389543533325195 ], [ "ikea", -12.389771461486816 ], [ "▁utiliza", -12.38977336883545 ], [ "sogenannte", -12.389808654785156 ], [ "0.2", -12.389836311340332 ], [ "▁Organ", -12.38987922668457 ], [ "▁Shakespeare", -12.389952659606934 ], [ "▁Maintenance", -12.38995361328125 ], [ "▁Wärme", -12.389954566955566 ], [ "▁Northwest", -12.390060424804688 ], [ "▁numit", -12.390106201171875 ], [ "▁mica", -12.390165328979492 ], [ "turm", -12.390168190002441 ], [ "▁motivate", -12.390250205993652 ], [ "▁Staats", -12.390355110168457 ], [ "optimum", -12.390487670898438 ], [ "▁sortir", -12.390546798706055 ], [ "▁Asset", -12.390555381774902 ], [ "▁hervorragend", -12.390692710876465 ], [ "▁commentary", -12.39071273803711 ], [ "▁actuellement", -12.390732765197754 ], [ "NER", -12.390765190124512 ], [ "NL", -12.390789985656738 ], [ "ritt", -12.390803337097168 ], [ "▁Wirtschafts", -12.390813827514648 ], [ "träger", -12.390840530395508 ], [ "▁Versand", -12.390870094299316 ], [ "▁nostri", -12.390953063964844 ], [ "▁enorm", -12.391227722167969 ], [ "▁whale", -12.391260147094727 ], [ "▁Aufgabe", -12.391277313232422 ], [ "▁unfair", -12.391291618347168 ], [ "▁Cord", -12.391315460205078 ], [ "incorporating", -12.39134693145752 ], [ "luck", -12.39157772064209 ], [ "Afrique", -12.39168643951416 ], [ "▁coated", -12.391857147216797 ], [ "▁india", -12.391908645629883 ], [ "▁temporarily", -12.39193058013916 ], [ "▁ciuda", -12.392097473144531 ], [ "▁coral", -12.392184257507324 ], [ "▁wirkt", -12.392203330993652 ], [ "▁folding", -12.392309188842773 ], [ "wichtigsten", -12.392398834228516 ], [ "impacted", -12.392422676086426 ], [ "▁wählen", -12.392423629760742 ], [ "▁differentiate", -12.392492294311523 ], [ "▁froid", -12.392544746398926 ], [ "▁hug", -12.39255142211914 ], [ "▁construi", -12.39255428314209 ], [ "▁membru", -12.392603874206543 ], [ "▁masculin", -12.392667770385742 ], [ "partisan", -12.392711639404297 ], [ "▁schimba", -12.392725944519043 ], [ "▁economies", -12.392827987670898 ], [ "▁Abraham", -12.392914772033691 ], [ "wesen", -12.393013954162598 ], [ "enia", -12.393026351928711 ], [ "▁answering", -12.393080711364746 ], [ "▁activități", -12.39309024810791 ], [ "▁mémoire", -12.393160820007324 ], [ "▁versucht", -12.393305778503418 ], [ "ember", -12.39333438873291 ], [ "▁instala", -12.39334774017334 ], [ "▁eligibility", -12.393407821655273 ], [ "▁enjoyment", -12.393409729003906 ], [ "▁Arme", -12.39350414276123 ], [ "although", -12.393534660339355 ], [ "▁encompass", -12.393596649169922 ], [ "▁zufrieden", -12.393658638000488 ], [ "Script", -12.393691062927246 ], [ "KG", -12.39385986328125 ], [ "▁adhesive", -12.393902778625488 ], [ "▁Verkehrs", -12.393908500671387 ], [ "▁monitored", -12.394103050231934 ], [ "▁Conservation", -12.394148826599121 ], [ "hav", -12.394156455993652 ], [ "▁Above", -12.394174575805664 ], [ "▁Former", -12.394241333007812 ], [ "▁Certain", -12.394250869750977 ], [ "saving", -12.394311904907227 ], [ "▁Pun", -12.394390106201172 ], [ "▁awkward", -12.394397735595703 ], [ "▁Pretty", -12.394410133361816 ], [ "▁scanning", -12.394417762756348 ], [ "layer", -12.394527435302734 ], [ "motor", -12.39453125 ], [ "▁beginnt", -12.39455795288086 ], [ "▁affiliated", -12.394681930541992 ], [ "▁archives", -12.394686698913574 ], [ "▁sunshine", -12.394892692565918 ], [ "kha", -12.394988059997559 ], [ "▁investigated", -12.395149230957031 ], [ "▁fantas", -12.395277976989746 ], [ "▁united", -12.395355224609375 ], [ "allegedly", -12.395373344421387 ], [ "▁Eugen", -12.3955078125 ], [ "▁proprie", -12.395843505859375 
], [ "uca", -12.396183013916016 ], [ "DES", -12.396187782287598 ], [ "ştii", -12.396190643310547 ], [ "▁Running", -12.39620590209961 ], [ "lbstverständlich", -12.396248817443848 ], [ "index", -12.396300315856934 ], [ "▁studiu", -12.396512031555176 ], [ "URE", -12.396553039550781 ], [ "gültig", -12.396627426147461 ], [ "▁lundi", -12.396649360656738 ], [ "▁Zucker", -12.396650314331055 ], [ "▁positively", -12.396721839904785 ], [ "folgenden", -12.396758079528809 ], [ "anță", -12.396800994873047 ], [ "▁clan", -12.396866798400879 ], [ "▁literacy", -12.396879196166992 ], [ "▁ober", -12.39699935913086 ], [ "John", -12.397003173828125 ], [ "greg", -12.39700984954834 ], [ "▁titlu", -12.397049903869629 ], [ "▁ţări", -12.39707088470459 ], [ "Bra", -12.397100448608398 ], [ "▁Evans", -12.397164344787598 ], [ "modern", -12.397172927856445 ], [ "▁hauteur", -12.397353172302246 ], [ "refers", -12.397416114807129 ], [ "▁plasma", -12.397575378417969 ], [ "▁optic", -12.397595405578613 ], [ "▁shampoo", -12.397619247436523 ], [ "▁cheek", -12.397727966308594 ], [ "opted", -12.397741317749023 ], [ "▁persönlich", -12.397832870483398 ], [ "▁1945", -12.398118019104004 ], [ "ICI", -12.398193359375 ], [ "biotic", -12.398222923278809 ], [ "▁Beruf", -12.398372650146484 ], [ "▁trez", -12.398383140563965 ], [ "▁diploma", -12.398388862609863 ], [ "nahmen", -12.398421287536621 ], [ "▁curl", -12.398625373840332 ], [ "▁agricole", -12.398824691772461 ], [ "▁recomand", -12.398844718933105 ], [ "▁pediatric", -12.398862838745117 ], [ "Fiecare", -12.39887523651123 ], [ "Anlage", -12.398906707763672 ], [ "weiß", -12.398974418640137 ], [ "elecommunication", -12.39898681640625 ], [ "hog", -12.399184226989746 ], [ "▁Stamp", -12.399364471435547 ], [ "▁Tipp", -12.399369239807129 ], [ "▁kindness", -12.399415969848633 ], [ "▁Marina", -12.399577140808105 ], [ "▁Gleich", -12.39963436126709 ], [ "▁grij", -12.39970588684082 ], [ "▁desperate", -12.39974594116211 ], [ "▁recordings", -12.399842262268066 ], [ "▁neglect", -12.399861335754395 ], [ "▁inherent", -12.400035858154297 ], [ "▁Rezept", -12.400138854980469 ], [ "▁soins", -12.400164604187012 ], [ "▁brut", -12.400250434875488 ], [ "▁revolutionary", -12.400495529174805 ], [ "▁liberté", -12.400530815124512 ], [ "cours", -12.400945663452148 ], [ "▁Similar", -12.401247024536133 ], [ "▁cheveux", -12.40136432647705 ], [ "▁ieftin", -12.401599884033203 ], [ "▁promovare", -12.40160846710205 ], [ "▁grains", -12.401729583740234 ], [ "ти", -12.401749610900879 ], [ "▁fonctionnement", -12.401789665222168 ], [ "▁Coming", -12.401832580566406 ], [ "▁analytical", -12.401847839355469 ], [ "▁simplify", -12.401856422424316 ], [ "▁chambres", -12.401893615722656 ], [ "▁fifty", -12.401930809020996 ], [ "jour", -12.402070999145508 ], [ "▁(17", -12.402194023132324 ], [ "cărui", -12.402292251586914 ], [ "▁harmony", -12.402352333068848 ], [ "grin", -12.402355194091797 ], [ "▁drunk", -12.402359962463379 ], [ "260", -12.402374267578125 ], [ "3-5", -12.40243148803711 ], [ "▁articole", -12.402442932128906 ], [ "▁flooding", -12.402482986450195 ], [ "halle", -12.402580261230469 ], [ "▁defects", -12.40276050567627 ], [ "▁rifle", -12.402839660644531 ], [ "▁Boc", -12.402843475341797 ], [ "▁Athletic", -12.40284538269043 ], [ "▁acordat", -12.40292739868164 ], [ "AIR", -12.402969360351562 ], [ "▁entwickeln", -12.403104782104492 ], [ "▁Advance", -12.403188705444336 ], [ "▁Heil", -12.403216361999512 ], [ "Stainless", -12.403345108032227 ], [ "▁Psychology", -12.40337085723877 ], [ "▁omul", -12.403435707092285 ], [ "▁Arbeiten", 
-12.403494834899902 ], [ "▁rabbit", -12.403495788574219 ], [ "▁méta", -12.40351390838623 ], [ "ismul", -12.403534889221191 ], [ "▁Herausforderung", -12.403594970703125 ], [ "▁Euch", -12.403654098510742 ], [ "geschichte", -12.40390682220459 ], [ "▁Milk", -12.404057502746582 ], [ "▁pregăt", -12.404065132141113 ], [ "▁Standort", -12.404141426086426 ], [ "Val", -12.404180526733398 ], [ "▁Ronald", -12.404350280761719 ], [ "▁Werbe", -12.404558181762695 ], [ "▁restrict", -12.404658317565918 ], [ "▁tablespoon", -12.404844284057617 ], [ "▁Amendment", -12.404845237731934 ], [ "▁Johnny", -12.404914855957031 ], [ "▁lively", -12.404938697814941 ], [ "ORD", -12.405147552490234 ], [ "▁mulţi", -12.40523624420166 ], [ "èrent", -12.405241012573242 ], [ "Every", -12.405277252197266 ], [ "eignet", -12.405296325683594 ], [ "GD", -12.40546989440918 ], [ "▁Ghana", -12.405628204345703 ], [ "▁wealthy", -12.40576171875 ], [ "▁advocates", -12.405818939208984 ], [ "▁Campaign", -12.40584659576416 ], [ "▁posters", -12.405964851379395 ], [ "flug", -12.406011581420898 ], [ "▁métier", -12.406139373779297 ], [ "kir", -12.406148910522461 ], [ "bond", -12.406176567077637 ], [ "datorita", -12.406188011169434 ], [ "▁Hochzeit", -12.406230926513672 ], [ "▁effectué", -12.406271934509277 ], [ "▁angles", -12.40654182434082 ], [ "▁Electrical", -12.406705856323242 ], [ "▁Administrator", -12.40674114227295 ], [ "▁spur", -12.407389640808105 ], [ "▁größere", -12.407444953918457 ], [ "woke", -12.407515525817871 ], [ "▁gewinnen", -12.407689094543457 ], [ "▁ajută", -12.407712936401367 ], [ "▁ventilation", -12.407853126525879 ], [ "▁viaţa", -12.407853126525879 ], [ "▁Dinner", -12.408079147338867 ], [ "respond", -12.408095359802246 ], [ "▁OEM", -12.408120155334473 ], [ "▁affair", -12.4081392288208 ], [ "▁öffentlich", -12.408143043518066 ], [ "ENS", -12.408209800720215 ], [ "▁Cent", -12.408224105834961 ], [ "▁făc", -12.408267974853516 ], [ "▁Doppel", -12.408285140991211 ], [ "▁fericit", -12.408363342285156 ], [ "▁coordon", -12.40845775604248 ], [ "geht", -12.408547401428223 ], [ "▁perfekte", -12.408610343933105 ], [ "▁sportive", -12.408700942993164 ], [ "▁proiectul", -12.40870189666748 ], [ "▁deadly", -12.408804893493652 ], [ "Geschäft", -12.408822059631348 ], [ "▁inspirational", -12.408854484558105 ], [ "+1", -12.409013748168945 ], [ "▁pearl", -12.409022331237793 ], [ "▁scrub", -12.409036636352539 ], [ "▁scheint", -12.409079551696777 ], [ "poo", -12.409147262573242 ], [ "▁Pier", -12.409220695495605 ], [ "▁commented", -12.409285545349121 ], [ "lute", -12.409302711486816 ], [ "▁cancelled", -12.409488677978516 ], [ "Win", -12.409605979919434 ], [ "▁payroll", -12.409781455993652 ], [ "▁varsta", -12.409881591796875 ], [ "stuffed", -12.410097122192383 ], [ "▁beads", -12.410138130187988 ], [ "▁poems", -12.410356521606445 ], [ "pokesman", -12.410399436950684 ], [ "▁checklist", -12.410523414611816 ], [ "▁Mich", -12.410636901855469 ], [ "GEN", -12.410676002502441 ], [ "▁Lau", -12.410783767700195 ], [ "▁stie", -12.410965919494629 ], [ "▁Lovely", -12.4110107421875 ], [ "▁Anschluss", -12.411062240600586 ], [ "▁personaj", -12.41108226776123 ], [ "▁ausgestattet", -12.411121368408203 ], [ "▁beginners", -12.411163330078125 ], [ "▁noon", -12.411189079284668 ], [ "▁celule", -12.41128921508789 ], [ "Trans", -12.411324501037598 ], [ "boot", -12.411331176757812 ], [ "▁drumul", -12.41136646270752 ], [ "gruppen", -12.41140079498291 ], [ "étend", -12.41140365600586 ], [ "▁risques", -12.411405563354492 ], [ "acclaimed", -12.411447525024414 ], [ "▁celelalte", 
-12.411617279052734 ], [ "▁condiţii", -12.411620140075684 ], [ "▁skiing", -12.411685943603516 ], [ "▁optimale", -12.411689758300781 ], [ "technology", -12.411773681640625 ], [ "▁renew", -12.411784172058105 ], [ "Cloud", -12.41179084777832 ], [ "▁damaging", -12.411905288696289 ], [ "GT", -12.412219047546387 ], [ "▁Reform", -12.41230583190918 ], [ "vedem", -12.412349700927734 ], [ "▁indicat", -12.412461280822754 ], [ "▁Maker", -12.412467002868652 ], [ "▁lichid", -12.412582397460938 ], [ "3.1", -12.412614822387695 ], [ "păt", -12.412620544433594 ], [ "lumina", -12.41264820098877 ], [ "▁Situ", -12.412806510925293 ], [ "▁Archives", -12.412857055664062 ], [ "▁allergies", -12.41287899017334 ], [ "▁Cameron", -12.412883758544922 ], [ "▁Immun", -12.412899017333984 ], [ "wissenschaftlich", -12.41301441192627 ], [ "▁supplémentaire", -12.413128852844238 ], [ "▁puterea", -12.413261413574219 ], [ "Lab", -12.413331985473633 ], [ "inspired", -12.413384437561035 ], [ "▁shrink", -12.413403511047363 ], [ "▁voit", -12.413426399230957 ], [ "▁chopped", -12.413467407226562 ], [ "▁Franz", -12.413537979125977 ], [ "oku", -12.413652420043945 ], [ "▁suppress", -12.413673400878906 ], [ "▁impress", -12.413751602172852 ], [ "▁Liga", -12.413755416870117 ], [ "▁Eight", -12.41378402709961 ], [ "720", -12.413795471191406 ], [ "▁securely", -12.413870811462402 ], [ "KU", -12.413934707641602 ], [ "modell", -12.413992881774902 ], [ "Ensure", -12.414154052734375 ], [ "größte", -12.414204597473145 ], [ "▁réuni", -12.414215087890625 ], [ "▁Internal", -12.41423225402832 ], [ "▁Punkte", -12.414320945739746 ], [ "▁replicate", -12.414412498474121 ], [ "▁spreadsheet", -12.414434432983398 ], [ "▁Hindu", -12.414549827575684 ], [ "▁Cham", -12.414578437805176 ], [ "nati", -12.414670944213867 ], [ "imply", -12.414679527282715 ], [ "funded", -12.414894104003906 ], [ "▁charitable", -12.414896011352539 ], [ "▁imagined", -12.415014266967773 ], [ "hausen", -12.41517448425293 ], [ "Keeping", -12.415239334106445 ], [ "▁attitudes", -12.415287971496582 ], [ "esque", -12.415365219116211 ], [ "▁Tennis", -12.415409088134766 ], [ "Jeremy", -12.415410041809082 ], [ "▁majeur", -12.415475845336914 ], [ "▁stii", -12.4155912399292 ], [ "▁herbal", -12.415790557861328 ], [ "▁cauta", -12.41580867767334 ], [ "▁voluntary", -12.415828704833984 ], [ "wohl", -12.415877342224121 ], [ "▁ideea", -12.41588306427002 ], [ "▁WW", -12.415899276733398 ], [ "▁erneut", -12.416010856628418 ], [ "größten", -12.416094779968262 ], [ "Grâce", -12.416159629821777 ], [ "▁Köln", -12.416193008422852 ], [ "▁mobilier", -12.416199684143066 ], [ "▁fool", -12.416254043579102 ], [ "▁Calcul", -12.416295051574707 ], [ "attaque", -12.41637897491455 ], [ "▁digestive", -12.41656494140625 ], [ "performance", -12.416647911071777 ], [ "▁homeowner", -12.41675853729248 ], [ "▁hunger", -12.4169282913208 ], [ "2.3", -12.41696834564209 ], [ "▁Sort", -12.417085647583008 ], [ "▁Dennis", -12.41723918914795 ], [ "▁certificat", -12.417250633239746 ], [ "▁Canal", -12.417337417602539 ], [ "▁Yesterday", -12.417424201965332 ], [ "▁sausage", -12.417499542236328 ], [ "▁perdu", -12.417736053466797 ], [ "ösen", -12.417741775512695 ], [ "▁preserved", -12.417750358581543 ], [ "▁trendy", -12.4177885055542 ], [ "▁iubire", -12.417935371398926 ], [ "▁grandfather", -12.417961120605469 ], [ "▁shoppers", -12.41820240020752 ], [ "▁verschieden", -12.418252944946289 ], [ "▁gagner", -12.41826343536377 ], [ "▁lucra", -12.418437004089355 ], [ "metru", -12.418464660644531 ], [ "buz", -12.418469429016113 ], [ "▁flourish", 
-12.418484687805176 ], [ "affin", -12.418523788452148 ], [ "▁Pflanzen", -12.41858196258545 ], [ "agh", -12.418588638305664 ], [ "▁Gill", -12.418660163879395 ], [ "▁Kä", -12.418671607971191 ], [ "▁Wege", -12.41876220703125 ], [ "▁Liberal", -12.418929100036621 ], [ "▁Glasgow", -12.418944358825684 ], [ "Objekt", -12.4189453125 ], [ "▁Huawei", -12.4189453125 ], [ "appropri", -12.418986320495605 ], [ "▁genius", -12.419037818908691 ], [ "▁brokers", -12.419068336486816 ], [ "▁themed", -12.41918659210205 ], [ "▁barre", -12.419210433959961 ], [ "1.7", -12.419219017028809 ], [ "▁Electro", -12.419303894042969 ], [ "▁umbrella", -12.419333457946777 ], [ "▁advisory", -12.419417381286621 ], [ "▁comport", -12.419421195983887 ], [ "▁neuer", -12.419452667236328 ], [ "▁Wick", -12.419568061828613 ], [ "wak", -12.419618606567383 ], [ "▁Woman", -12.419695854187012 ], [ "▁lesser", -12.419843673706055 ], [ "▁replied", -12.419987678527832 ], [ "▁représente", -12.420050621032715 ], [ "▁thé", -12.420135498046875 ], [ "Deutsch", -12.420428276062012 ], [ "Cat", -12.420483589172363 ], [ "▁équipes", -12.420534133911133 ], [ "▁spider", -12.420578956604004 ], [ "▁Gaming", -12.420589447021484 ], [ "▁Liste", -12.420592308044434 ], [ "▁affection", -12.420639038085938 ], [ "lipsa", -12.420982360839844 ], [ "▁Spider", -12.420987129211426 ], [ "▁Julia", -12.421034812927246 ], [ "anlagen", -12.421159744262695 ], [ "Kon", -12.421363830566406 ], [ "nței", -12.421368598937988 ], [ "▁Verwaltung", -12.421483993530273 ], [ "▁raspuns", -12.421489715576172 ], [ "samt", -12.421491622924805 ], [ "▁creștere", -12.421512603759766 ], [ "▁decorate", -12.421701431274414 ], [ "▁Chain", -12.422021865844727 ], [ "ów", -12.422050476074219 ], [ "0-0", -12.422104835510254 ], [ "▁Cran", -12.422407150268555 ], [ "▁streak", -12.42242431640625 ], [ "ор", -12.422517776489258 ], [ "▁căuta", -12.422754287719727 ], [ "wende", -12.422801971435547 ], [ "▁haine", -12.42280387878418 ], [ "▁landscaping", -12.423009872436523 ], [ "▁historian", -12.423016548156738 ], [ "▁grandchildren", -12.423033714294434 ], [ "▁crawl", -12.423056602478027 ], [ "▁Cub", -12.423239707946777 ], [ "▁nécessaires", -12.423515319824219 ], [ "▁swift", -12.42352294921875 ], [ "▁calculation", -12.423656463623047 ], [ "▁acteurs", -12.423715591430664 ], [ "VT", -12.423752784729004 ], [ "▁Hristos", -12.423778533935547 ], [ "▁slices", -12.423850059509277 ], [ "See", -12.424203872680664 ], [ "▁Bran", -12.424233436584473 ], [ "Symbol", -12.424449920654297 ], [ "▁allowance", -12.424492835998535 ], [ "▁Effective", -12.424537658691406 ], [ "▁Wünsche", -12.424539566040039 ], [ "▁shiny", -12.424569129943848 ], [ "▁professionalism", -12.424715995788574 ], [ "/6", -12.424970626831055 ], [ "▁terrasse", -12.425087928771973 ], [ "▁researcher", -12.425156593322754 ], [ "▁fragile", -12.425203323364258 ], [ "▁greeting", -12.425274848937988 ], [ "freien", -12.4253511428833 ], [ "▁valuation", -12.425372123718262 ], [ "▁incur", -12.425386428833008 ], [ "▁Zwischen", -12.425559997558594 ], [ "▁comfy", -12.425569534301758 ], [ "▁méthode", -12.42569351196289 ], [ "▁Pirate", -12.425816535949707 ], [ "▁Moto", -12.425822257995605 ], [ "(6)", -12.425823211669922 ], [ "▁devin", -12.42582893371582 ], [ "▁civic", -12.425837516784668 ], [ "usage", -12.425889015197754 ], [ "▁istorie", -12.425945281982422 ], [ "▁piste", -12.425955772399902 ], [ "▁Rug", -12.426091194152832 ], [ "pä", -12.426129341125488 ], [ "▁matur", -12.426148414611816 ], [ "CAS", -12.426155090332031 ], [ "TIC", -12.42618465423584 ], [ "▁Reduce", 
-12.426234245300293 ], [ "▁commemorat", -12.426321983337402 ], [ "▁cease", -12.42653751373291 ], [ "unterschiedliche", -12.42656421661377 ], [ "▁cinnamon", -12.426581382751465 ], [ "▁Font", -12.426583290100098 ], [ "▁justify", -12.426751136779785 ], [ "deteriorat", -12.426797866821289 ], [ "▁Schön", -12.42684555053711 ], [ "plain", -12.426993370056152 ], [ "frist", -12.427002906799316 ], [ "▁helmet", -12.42712116241455 ], [ "▁statute", -12.42721939086914 ], [ "accept", -12.427236557006836 ], [ "▁1,5", -12.42724323272705 ], [ "▁recon", -12.42724323272705 ], [ "▁Möbel", -12.427348136901855 ], [ "▁idées", -12.427367210388184 ], [ "automat", -12.427552223205566 ], [ "Team", -12.42758846282959 ], [ "▁performers", -12.427688598632812 ], [ "▁microphone", -12.427722930908203 ], [ "impotriva", -12.427775382995605 ], [ "▁pillows", -12.42780876159668 ], [ "▁accountable", -12.427812576293945 ], [ "▁strings", -12.42782974243164 ], [ "hydrate", -12.427835464477539 ], [ "▁Yan", -12.427865028381348 ], [ "starea", -12.427918434143066 ], [ "▁présenté", -12.42793083190918 ], [ "▁extensively", -12.428048133850098 ], [ "äst", -12.428114891052246 ], [ "▁correlation", -12.428115844726562 ], [ "bespoke", -12.428119659423828 ], [ "▁creste", -12.428196907043457 ], [ "▁Armenia", -12.428248405456543 ], [ "nose", -12.428426742553711 ], [ "▁strengthening", -12.428604125976562 ], [ "▁Horizon", -12.428627014160156 ], [ "▁obesity", -12.428627967834473 ], [ "seasoned", -12.428686141967773 ], [ "▁screenshot", -12.428736686706543 ], [ "girl", -12.42875862121582 ], [ "▁hardest", -12.428826332092285 ], [ "▁weakness", -12.428855895996094 ], [ "effectuer", -12.429012298583984 ], [ "▁Florence", -12.429034233093262 ], [ "▁Europene", -12.429062843322754 ], [ "triggered", -12.429333686828613 ], [ "Apparently", -12.42939567565918 ], [ "▁diagnose", -12.42943286895752 ], [ "rushed", -12.429494857788086 ], [ "▁trotz", -12.429516792297363 ], [ "▁spécial", -12.429680824279785 ], [ "▁lumi", -12.429783821105957 ], [ "7:00", -12.429877281188965 ], [ "▁publicat", -12.429903984069824 ], [ "ос", -12.430086135864258 ], [ "▁hue", -12.430136680603027 ], [ "▁termination", -12.430139541625977 ], [ "▁Nam", -12.430240631103516 ], [ "Well", -12.430376052856445 ], [ "▁Extract", -12.430441856384277 ], [ "atiile", -12.43062686920166 ], [ "▁vivid", -12.43076229095459 ], [ "hrs", -12.430858612060547 ], [ "▁povesti", -12.430984497070312 ], [ "stehenden", -12.430988311767578 ], [ "▁informieren", -12.431070327758789 ], [ "employed", -12.431133270263672 ], [ "▁armor", -12.431180953979492 ], [ "▁Columbus", -12.431191444396973 ], [ "Registr", -12.431200981140137 ], [ "▁Kamera", -12.431203842163086 ], [ "▁ugly", -12.431203842163086 ], [ "outil", -12.431234359741211 ], [ "▁evenly", -12.43134593963623 ], [ "lungul", -12.431349754333496 ], [ "koch", -12.431439399719238 ], [ "▁Dig", -12.431450843811035 ], [ "purely", -12.431489944458008 ], [ "▁Surf", -12.431560516357422 ], [ "rilla", -12.431628227233887 ], [ "▁Watson", -12.43171215057373 ], [ "trug", -12.431719779968262 ], [ "figuring", -12.431784629821777 ], [ "▁competitor", -12.431807518005371 ], [ "▁humid", -12.431889533996582 ], [ "▁Lawyer", -12.43189811706543 ], [ "Added", -12.43205451965332 ], [ "▁salva", -12.432056427001953 ], [ "▁drainage", -12.4321870803833 ], [ "Featuring", -12.432220458984375 ], [ "▁Pel", -12.43234634399414 ], [ "▁acasa", -12.432611465454102 ], [ "▁expectation", -12.43265438079834 ], [ "gibt", -12.432663917541504 ], [ "▁marginal", -12.432831764221191 ], [ "ceni", -12.433028221130371 ], [ 
"▁européen", -12.433065414428711 ], [ "clav", -12.433090209960938 ], [ "▁Shot", -12.433167457580566 ], [ "commun", -12.43322467803955 ], [ "▁Calendar", -12.433247566223145 ], [ "▁trek", -12.433348655700684 ], [ "rechtliche", -12.433406829833984 ], [ "▁Perry", -12.43342399597168 ], [ "▁surge", -12.433484077453613 ], [ "geschäft", -12.433504104614258 ], [ "paced", -12.433793067932129 ], [ "depend", -12.433871269226074 ], [ "▁Sache", -12.433947563171387 ], [ "▁Example", -12.433998107910156 ], [ "▁lider", -12.434118270874023 ], [ "▁nochmal", -12.434240341186523 ], [ "▁Present", -12.434243202209473 ], [ "KW", -12.434335708618164 ], [ "prompted", -12.434350967407227 ], [ "logique", -12.434444427490234 ], [ "Université", -12.434466361999512 ], [ "lith", -12.434489250183105 ], [ "▁Gefahr", -12.434579849243164 ], [ "▁Acid", -12.434625625610352 ], [ "objets", -12.434791564941406 ], [ "▁societies", -12.434791564941406 ], [ "▁distraction", -12.434816360473633 ], [ "▁puissance", -12.434934616088867 ], [ "▁alleviat", -12.435026168823242 ], [ "▁Capitol", -12.435050010681152 ], [ "▁Heim", -12.435129165649414 ], [ "judicial", -12.435230255126953 ], [ "▁nowadays", -12.435309410095215 ], [ "▁Hammer", -12.435317039489746 ], [ "▁metallic", -12.435327529907227 ], [ "▁distr", -12.435388565063477 ], [ "▁dispos", -12.435397148132324 ], [ "profile", -12.435408592224121 ], [ "▁Nicolas", -12.435602188110352 ], [ "▁presa", -12.435760498046875 ], [ "augh", -12.43578052520752 ], [ "schuss", -12.435787200927734 ], [ "▁Diana", -12.436062812805176 ], [ "4-5", -12.436097145080566 ], [ "▁Chapel", -12.43612003326416 ], [ "▁zahar", -12.436150550842285 ], [ "âmb", -12.4362154006958 ], [ "▁Tarif", -12.436264991760254 ], [ "▁devastating", -12.436339378356934 ], [ "6:00", -12.4364013671875 ], [ "▁100,000", -12.43645191192627 ], [ "NIC", -12.436580657958984 ], [ "▁Lucas", -12.436612129211426 ], [ "▁bequem", -12.436662673950195 ], [ "▁Motion", -12.436698913574219 ], [ "7,000", -12.436701774597168 ], [ "▁malware", -12.436708450317383 ], [ "▁avenue", -12.436723709106445 ], [ "▁manger", -12.436747550964355 ], [ "▁Queensland", -12.436857223510742 ], [ "▁Papier", -12.436861991882324 ], [ "▁Increase", -12.436880111694336 ], [ "▁implies", -12.436954498291016 ], [ "▁äußer", -12.43697452545166 ], [ "▁Meine", -12.436980247497559 ], [ "Reuters", -12.437155723571777 ], [ "▁Belt", -12.437232971191406 ], [ "Educat", -12.437251091003418 ], [ "▁Aktion", -12.437355041503906 ], [ "schläge", -12.437372207641602 ], [ "▁înregistrat", -12.437426567077637 ], [ "▁Ortho", -12.43756103515625 ], [ "▁bulbs", -12.437761306762695 ], [ "kap", -12.437793731689453 ], [ "▁peinture", -12.437901496887207 ], [ "▁Lounge", -12.437907218933105 ], [ "▁Tampa", -12.438008308410645 ], [ "ifiziert", -12.438100814819336 ], [ "kinder", -12.438172340393066 ], [ "▁comparativ", -12.438281059265137 ], [ "häuser", -12.438323974609375 ], [ "incarn", -12.438363075256348 ], [ "▁amazon", -12.438464164733887 ], [ "▁Southeast", -12.438505172729492 ], [ "▁economical", -12.438667297363281 ], [ "▁broth", -12.438697814941406 ], [ "▁Secure", -12.438750267028809 ], [ "damals", -12.438875198364258 ], [ "▁Elementary", -12.438921928405762 ], [ "▁Wildlife", -12.438995361328125 ], [ "▁Jewel", -12.439001083374023 ], [ "▁protocols", -12.439297676086426 ], [ "▁zbor", -12.4393892288208 ], [ "▁enthusiasts", -12.439398765563965 ], [ "▁Mirror", -12.439444541931152 ], [ "▁soak", -12.439537048339844 ], [ "▁Sad", -12.439574241638184 ], [ "▁dishwasher", -12.439957618713379 ], [ "▁vollständig", 
-12.440186500549316 ], [ "▁Vermont", -12.440407752990723 ], [ "▁caut", -12.440449714660645 ], [ "▁fournisseur", -12.440475463867188 ], [ "▁Concrete", -12.44047737121582 ], [ "▁Instant", -12.440595626831055 ], [ "▁reveni", -12.440597534179688 ], [ "▁Surface", -12.44059944152832 ], [ "zumindest", -12.440713882446289 ], [ "▁feast", -12.440725326538086 ], [ "▁stretching", -12.440803527832031 ], [ "ERA", -12.440997123718262 ], [ "▁Scholarship", -12.441020965576172 ], [ "▁vineyard", -12.4410400390625 ], [ "▁régulièrement", -12.441083908081055 ], [ "▁patches", -12.441093444824219 ], [ "▁Gamb", -12.44113540649414 ], [ "▁Vereins", -12.441152572631836 ], [ "ège", -12.441372871398926 ], [ "▁constitutional", -12.441411018371582 ], [ "erreur", -12.441413879394531 ], [ "▁Colombia", -12.441514015197754 ], [ "UF", -12.441618919372559 ], [ "aider", -12.441665649414062 ], [ "cision", -12.44180965423584 ], [ "▁publishers", -12.441913604736328 ], [ "▁prelua", -12.441967964172363 ], [ "▁keiner", -12.441990852355957 ], [ "▁amid", -12.442020416259766 ], [ "▁quantitative", -12.442031860351562 ], [ "▁decay", -12.442058563232422 ], [ "▁distinguished", -12.4420747756958 ], [ "▁Gründe", -12.442209243774414 ], [ "▁statului", -12.442362785339355 ], [ "CAT", -12.442436218261719 ], [ "allow", -12.442481994628906 ], [ "▁mathematical", -12.442550659179688 ], [ "▁tragedy", -12.44255542755127 ], [ "▁heels", -12.442609786987305 ], [ "opia", -12.44265365600586 ], [ "▁merger", -12.4428071975708 ], [ "dispositif", -12.442813873291016 ], [ "▁pneu", -12.44283390045166 ], [ "elte", -12.443058013916016 ], [ "▁Introduction", -12.443070411682129 ], [ "▁biscuit", -12.443134307861328 ], [ "▁leftover", -12.443275451660156 ], [ "▁tester", -12.443314552307129 ], [ "▁Terre", -12.443380355834961 ], [ "▁Oui", -12.44338321685791 ], [ "▁rar", -12.443520545959473 ], [ "▁beverages", -12.443666458129883 ], [ "▁parenting", -12.443892478942871 ], [ "1-0", -12.444053649902344 ], [ "▁Barry", -12.44417667388916 ], [ "▁Lynn", -12.444209098815918 ], [ "▁Tyler", -12.444262504577637 ], [ "▁fotbal", -12.44437026977539 ], [ "dron", -12.444475173950195 ], [ "▁donor", -12.44455623626709 ], [ "▁drape", -12.444558143615723 ], [ "▁positioning", -12.444963455200195 ], [ "▁Tang", -12.445006370544434 ], [ "▁overwhelmed", -12.445161819458008 ], [ "▁perte", -12.445192337036133 ], [ "▁blender", -12.445302963256836 ], [ "TG", -12.445467948913574 ], [ "GHz", -12.445490837097168 ], [ "▁administrat", -12.445719718933105 ], [ "▁glaube", -12.445771217346191 ], [ "Char", -12.445947647094727 ], [ "impression", -12.44627571105957 ], [ "proving", -12.446297645568848 ], [ "▁Inner", -12.446434020996094 ], [ "root", -12.446501731872559 ], [ "▁Gedanken", -12.446508407592773 ], [ "▁underway", -12.446596145629883 ], [ "coat", -12.44660758972168 ], [ "▁thereof", -12.446663856506348 ], [ "rius", -12.446700096130371 ], [ "▁intermediate", -12.446751594543457 ], [ "gmail", -12.446869850158691 ], [ "114", -12.446893692016602 ], [ "▁interfere", -12.446908950805664 ], [ "▁Found", -12.446930885314941 ], [ "LF", -12.447071075439453 ], [ "▁equality", -12.447099685668945 ], [ "▁concurrent", -12.44710636138916 ], [ "akh", -12.447107315063477 ], [ "▁touching", -12.44715690612793 ], [ "▁curiosity", -12.447235107421875 ], [ "▁rendering", -12.447263717651367 ], [ "▁1964", -12.447442054748535 ], [ "sorge", -12.447468757629395 ], [ "ARC", -12.447505950927734 ], [ "▁Desktop", -12.44752311706543 ], [ "▁Tak", -12.44760799407959 ], [ "filtration", -12.447651863098145 ], [ "▁gates", -12.4478759765625 ], [ 
"Sehr", -12.44791316986084 ], [ "▁spatiu", -12.44798755645752 ], [ "▁Leg", -12.448103904724121 ], [ "▁aviation", -12.448277473449707 ], [ "wandel", -12.44827938079834 ], [ "▁Shar", -12.448323249816895 ], [ "▁Volks", -12.448409080505371 ], [ "maz", -12.448698997497559 ], [ "governmental", -12.44874095916748 ], [ "euros", -12.448819160461426 ], [ "avantage", -12.448823928833008 ], [ "sitzt", -12.448856353759766 ], [ "IER", -12.448920249938965 ], [ "▁Theory", -12.44894027709961 ], [ "Cependant", -12.44907283782959 ], [ "▁Teachers", -12.449080467224121 ], [ "anspruch", -12.449095726013184 ], [ "▁afecta", -12.449139595031738 ], [ "enko", -12.449193000793457 ], [ "▁breeding", -12.449198722839355 ], [ "▁Peak", -12.449457168579102 ], [ "▁găsit", -12.449516296386719 ], [ "▁măsuri", -12.4495267868042 ], [ "edia", -12.449625968933105 ], [ "biz", -12.449640274047852 ], [ "zum", -12.449776649475098 ], [ "▁schwierig", -12.449847221374512 ], [ "Sense", -12.450050354003906 ], [ "▁Jump", -12.450081825256348 ], [ "▁cocktails", -12.450108528137207 ], [ "abhängig", -12.45012378692627 ], [ "realised", -12.450140953063965 ], [ "▁programul", -12.450214385986328 ], [ "▁prévu", -12.450238227844238 ], [ "▁twitter", -12.450372695922852 ], [ "Union", -12.450400352478027 ], [ "▁Marathon", -12.45040225982666 ], [ "▁Christianity", -12.450432777404785 ], [ "▁Alberta", -12.450811386108398 ], [ "einheit", -12.45097827911377 ], [ "▁wellbeing", -12.450982093811035 ], [ "phen", -12.451166152954102 ], [ "▁Charleston", -12.451180458068848 ], [ "▁uncover", -12.451323509216309 ], [ "▁humaine", -12.451464653015137 ], [ "▁bleeding", -12.451531410217285 ], [ "▁manipul", -12.451532363891602 ], [ "▁humidity", -12.451570510864258 ], [ "▁Puis", -12.451748847961426 ], [ "▁aktuell", -12.451922416687012 ], [ "▁Nissan", -12.451943397521973 ], [ "▁Eisen", -12.45202922821045 ], [ "treiben", -12.452059745788574 ], [ "cios", -12.452073097229004 ], [ "ikh", -12.452381134033203 ], [ "acquiring", -12.452466011047363 ], [ "▁Wallpaper", -12.452488899230957 ], [ "▁rond", -12.452558517456055 ], [ "▁Doug", -12.45267391204834 ], [ "sourcing", -12.452696800231934 ], [ "▁1900", -12.452825546264648 ], [ "▁buni", -12.452913284301758 ], [ "vest", -12.452916145324707 ], [ "▁Bangladesh", -12.452990531921387 ], [ "Home", -12.453160285949707 ], [ "▁wrinkle", -12.453252792358398 ], [ "rado", -12.453290939331055 ], [ "▁Pain", -12.45334243774414 ], [ "▁herzlich", -12.453354835510254 ], [ "MRI", -12.453426361083984 ], [ "UG", -12.453631401062012 ], [ "▁Desk", -12.453679084777832 ], [ "▁remarc", -12.453718185424805 ], [ "▁sodium", -12.453857421875 ], [ "▁Jede", -12.453892707824707 ], [ "▁réelle", -12.453959465026855 ], [ "▁Polar", -12.454068183898926 ], [ "▁activists", -12.454273223876953 ], [ "lasted", -12.454300880432129 ], [ "Some", -12.45432186126709 ], [ "ISE", -12.454338073730469 ], [ "▁peine", -12.454671859741211 ], [ "▁crude", -12.454852104187012 ], [ "Maur", -12.454916954040527 ], [ "▁forcing", -12.454933166503906 ], [ "▁politici", -12.454970359802246 ], [ "▁condiții", -12.454988479614258 ], [ "▁Saving", -12.454999923706055 ], [ "▁descoperi", -12.455020904541016 ], [ "avenir", -12.455055236816406 ], [ "Akt", -12.455069541931152 ], [ "▁vocabulary", -12.45509147644043 ], [ "▁pont", -12.455168724060059 ], [ "West", -12.45518970489502 ], [ "lenk", -12.455278396606445 ], [ "▁Verbraucher", -12.455367088317871 ], [ "affects", -12.455448150634766 ], [ "▁Flower", -12.455543518066406 ], [ "▁Nebraska", -12.455617904663086 ], [ "▁assortment", -12.455618858337402 ], [ 
"hock", -12.455619812011719 ], [ "▁discounted", -12.455803871154785 ], [ "▁Sensor", -12.455840110778809 ], [ "Lie", -12.45588207244873 ], [ "▁Volkswagen", -12.455887794494629 ], [ "isseur", -12.455888748168945 ], [ "indice", -12.455936431884766 ], [ "▁scanner", -12.455986022949219 ], [ "fashioned", -12.456040382385254 ], [ "▁postal", -12.456141471862793 ], [ "ouvrir", -12.45615291595459 ], [ "▁seminars", -12.45622444152832 ], [ "ioase", -12.456232070922852 ], [ "▁Stanley", -12.456260681152344 ], [ "Various", -12.456335067749023 ], [ "essentiel", -12.45650577545166 ], [ "▁administered", -12.456693649291992 ], [ "▁concession", -12.456748008728027 ], [ "▁mould", -12.456789016723633 ], [ "▁strongest", -12.456826210021973 ], [ "Erlebnis", -12.456933975219727 ], [ "▁ehemalige", -12.456933975219727 ], [ "▁Tale", -12.457234382629395 ], [ "▁Buyer", -12.457353591918945 ], [ "ück", -12.457578659057617 ], [ "▁Kommentar", -12.457720756530762 ], [ "▁Schrift", -12.457756996154785 ], [ "Design", -12.457792282104492 ], [ "▁stirring", -12.457937240600586 ], [ "▁towels", -12.457987785339355 ], [ "▁$30", -12.458101272583008 ], [ "sprache", -12.458279609680176 ], [ "▁Regierung", -12.458346366882324 ], [ "▁nachhaltig", -12.458406448364258 ], [ "▁électronique", -12.458515167236328 ], [ "▁Andrei", -12.458587646484375 ], [ "because", -12.458647727966309 ], [ "informatique", -12.458650588989258 ], [ "IGHT", -12.4586820602417 ], [ "stepping", -12.4586820602417 ], [ "▁gris", -12.458748817443848 ], [ "vious", -12.458773612976074 ], [ "▁upside", -12.4591064453125 ], [ "▁Examples", -12.459108352661133 ], [ "IU", -12.459110260009766 ], [ "▁princess", -12.459111213684082 ], [ "spielen", -12.45921516418457 ], [ "legung", -12.45950984954834 ], [ "▁reflecting", -12.4597806930542 ], [ "▁Processing", -12.459939002990723 ], [ "▁jungle", -12.460033416748047 ], [ "▁insects", -12.46006965637207 ], [ "▁Sibiu", -12.460220336914062 ], [ "160", -12.460259437561035 ], [ "▁interessante", -12.460267066955566 ], [ "▁multimedia", -12.460455894470215 ], [ "essel", -12.46049690246582 ], [ "/18", -12.460647583007812 ], [ "nière", -12.460683822631836 ], [ "ministru", -12.46072006225586 ], [ "▁implants", -12.460826873779297 ], [ "▁Settings", -12.461360931396484 ], [ "▁invaluable", -12.461432456970215 ], [ "stains", -12.461448669433594 ], [ "onym", -12.461518287658691 ], [ "▁searched", -12.461570739746094 ], [ "▁disappointment", -12.461628913879395 ], [ "▁Iranian", -12.461630821228027 ], [ "▁questionnaire", -12.461630821228027 ], [ "Founder", -12.46178913116455 ], [ "▁Bericht", -12.461792945861816 ], [ "▁youngest", -12.461896896362305 ], [ "▁Automatic", -12.461956024169922 ], [ "▁plecat", -12.46203327178955 ], [ "geber", -12.462119102478027 ], [ "soweit", -12.462124824523926 ], [ "▁unfold", -12.462236404418945 ], [ "▁befinden", -12.462274551391602 ], [ "▁susţin", -12.462637901306152 ], [ "▁Mack", -12.462675094604492 ], [ "▁dificil", -12.462757110595703 ], [ "enseigne", -12.463038444519043 ], [ "▁vitamine", -12.463047981262207 ], [ "▁Memory", -12.463092803955078 ], [ "ripping", -12.463129043579102 ], [ "drin", -12.463146209716797 ], [ "3.2", -12.463278770446777 ], [ "▁verstehen", -12.463287353515625 ], [ "▁scaun", -12.46341323852539 ], [ "▁procédure", -12.46380615234375 ], [ "▁molecules", -12.463911056518555 ], [ "▁Anzahl", -12.46391487121582 ], [ "▁yogurt", -12.464071273803711 ], [ "▁Dominic", -12.464113235473633 ], [ "▁shocked", -12.464156150817871 ], [ "▁zilei", -12.464269638061523 ], [ "▁Heiz", -12.464412689208984 ], [ "▁Educational", 
-12.464571952819824 ], [ "BN", -12.464577674865723 ], [ "analyzing", -12.464601516723633 ], [ "hair", -12.464676856994629 ], [ "spiegel", -12.464871406555176 ], [ "▁illusion", -12.464889526367188 ], [ "BG", -12.46505355834961 ], [ "deductible", -12.46513557434082 ], [ "▁adj", -12.4651460647583 ], [ "▁accessory", -12.465166091918945 ], [ "▁Draw", -12.465167999267578 ], [ "▁airlines", -12.46518611907959 ], [ "▁satisfai", -12.46536636352539 ], [ "▁architects", -12.465447425842285 ], [ "istische", -12.465508460998535 ], [ "▁Healthy", -12.465539932250977 ], [ "großer", -12.465669631958008 ], [ "▁comunicare", -12.465764999389648 ], [ "▁Meyer", -12.46577262878418 ], [ "▁reproduction", -12.465882301330566 ], [ "▁Manufacturing", -12.465929985046387 ], [ "immobilier", -12.465930938720703 ], [ "▁Unterschied", -12.465958595275879 ], [ "▁cumpara", -12.466029167175293 ], [ "▁duplicate", -12.466094017028809 ], [ "▁(16", -12.466096878051758 ], [ "▁detector", -12.466279983520508 ], [ "▁observat", -12.466387748718262 ], [ "▁1965", -12.466682434082031 ], [ "▁Fantasy", -12.466728210449219 ], [ "▁brauchen", -12.466728210449219 ], [ "▁Participants", -12.466780662536621 ], [ "▁décide", -12.466817855834961 ], [ "▁kicke", -12.466819763183594 ], [ "▁SSL", -12.466885566711426 ], [ "360", -12.466989517211914 ], [ "Anim", -12.467019081115723 ], [ "▁cupcake", -12.467031478881836 ], [ "▁Lamb", -12.467107772827148 ], [ "▁Sä", -12.467155456542969 ], [ "ntă", -12.46738052368164 ], [ "▁Pig", -12.467421531677246 ], [ "1,000", -12.467677116394043 ], [ "nhof", -12.467782020568848 ], [ "▁discret", -12.467947959899902 ], [ "▁deloc", -12.467991828918457 ], [ "▁Bücher", -12.467999458312988 ], [ "chor", -12.468042373657227 ], [ "course", -12.468070030212402 ], [ "▁cough", -12.468076705932617 ], [ "▁erstellt", -12.468087196350098 ], [ "▁Than", -12.468097686767578 ], [ "stätte", -12.46812915802002 ], [ "▁exceptionally", -12.468162536621094 ], [ "▁semnal", -12.468186378479004 ], [ "▁Interessen", -12.468329429626465 ], [ "ле", -12.468356132507324 ], [ "xx", -12.468402862548828 ], [ "▁Veterans", -12.468422889709473 ], [ "▁Kreuz", -12.468683242797852 ], [ "▁Nachricht", -12.468701362609863 ], [ "treated", -12.468894004821777 ], [ "▁tide", -12.469230651855469 ], [ "▁nonetheless", -12.469390869140625 ], [ "▁Subject", -12.469439506530762 ], [ "▁Stau", -12.469440460205078 ], [ "▁stickers", -12.469463348388672 ], [ "Alp", -12.46950912475586 ], [ "▁flagship", -12.469541549682617 ], [ "▁trimite", -12.469619750976562 ], [ "▁polyester", -12.469664573669434 ], [ "▁locui", -12.469671249389648 ], [ "▁chili", -12.46968936920166 ], [ "▁Browser", -12.469808578491211 ], [ "sieg", -12.469809532165527 ], [ "▁Arabic", -12.469876289367676 ], [ "blich", -12.47001838684082 ], [ "▁wunderbar", -12.470090866088867 ], [ "▁furnishings", -12.470210075378418 ], [ "rtie", -12.470243453979492 ], [ "8.5", -12.470742225646973 ], [ "▁Sponsor", -12.471016883850098 ], [ "▁glitter", -12.471280097961426 ], [ "▁piaț", -12.471402168273926 ], [ "▁interviewed", -12.471519470214844 ], [ "▁Statistics", -12.471529006958008 ], [ "▁cerc", -12.47154712677002 ], [ "augmentation", -12.47155475616455 ], [ "▁Navi", -12.471558570861816 ], [ "▁Begriff", -12.47156047821045 ], [ "▁știu", -12.471596717834473 ], [ "▁unabhängig", -12.471778869628906 ], [ "▁könnten", -12.471978187561035 ], [ "▁travaille", -12.472000122070312 ], [ "▁companie", -12.472027778625488 ], [ "▁Scientific", -12.472061157226562 ], [ "▁Outlook", -12.472091674804688 ], [ "▁fairy", -12.472158432006836 ], [ "zam", 
-12.472282409667969 ], [ "bak", -12.472448348999023 ], [ "▁Traffic", -12.472596168518066 ], [ "gerät", -12.472671508789062 ], [ "▁freezing", -12.472701072692871 ], [ "▁broadband", -12.4727201461792 ], [ "110", -12.47279167175293 ], [ "▁revenu", -12.472887992858887 ], [ "listed", -12.472900390625 ], [ "▁Rico", -12.472941398620605 ], [ "Laure", -12.472990036010742 ], [ "ATA", -12.473112106323242 ], [ "▁participer", -12.47313117980957 ], [ "▁sponsorship", -12.473235130310059 ], [ "▁distress", -12.473286628723145 ], [ "▁Brisbane", -12.47339916229248 ], [ "schönen", -12.473437309265137 ], [ "▁fizice", -12.473465919494629 ], [ "▁Political", -12.47362232208252 ], [ "uhr", -12.473657608032227 ], [ "▁procedura", -12.473713874816895 ], [ "▁hervor", -12.473770141601562 ], [ "melted", -12.473776817321777 ], [ "▁Emp", -12.47384262084961 ], [ "▁Ernährung", -12.4739351272583 ], [ "▁Pendant", -12.473944664001465 ], [ "▁recipients", -12.474047660827637 ], [ "Claude", -12.474133491516113 ], [ "▁regimen", -12.47415828704834 ], [ "expo", -12.474346160888672 ], [ "adevăr", -12.47437858581543 ], [ "▁critically", -12.474440574645996 ], [ "▁grabbe", -12.474468231201172 ], [ "▁Kann", -12.474474906921387 ], [ "▁directeur", -12.474613189697266 ], [ "gator", -12.474908828735352 ], [ "problem", -12.474910736083984 ], [ "scribe", -12.474913597106934 ], [ "▁exig", -12.474920272827148 ], [ "Tri", -12.474969863891602 ], [ "▁aqua", -12.475631713867188 ], [ "appréci", -12.47569465637207 ], [ "▁viaţă", -12.47571849822998 ], [ "▁dominate", -12.475865364074707 ], [ "disc", -12.475889205932617 ], [ "▁conseiller", -12.47603988647461 ], [ "▁shuttle", -12.476180076599121 ], [ "▁Status", -12.47623062133789 ], [ "▁ausreichend", -12.476371765136719 ], [ "▁spät", -12.476411819458008 ], [ "▁remainder", -12.476417541503906 ], [ "wett", -12.476430892944336 ], [ "schlossen", -12.476491928100586 ], [ "PAC", -12.476505279541016 ], [ "▁suprafata", -12.476617813110352 ], [ "5.000", -12.476673126220703 ], [ "supplying", -12.47673225402832 ], [ "▁uniquely", -12.476905822753906 ], [ "▁retard", -12.476929664611816 ], [ "▁Bang", -12.477006912231445 ], [ "ieuse", -12.477087020874023 ], [ "▁Ted", -12.477248191833496 ], [ "▁ermöglichen", -12.47732925415039 ], [ "▁builders", -12.477380752563477 ], [ "▁proximité", -12.477423667907715 ], [ "▁unforgettable", -12.477423667907715 ], [ "256", -12.477446556091309 ], [ "fähigkeit", -12.477550506591797 ], [ "▁procurement", -12.477561950683594 ], [ "▁Gewicht", -12.477693557739258 ], [ "▁potentiel", -12.47778606414795 ], [ "▁topping", -12.478300094604492 ], [ "▁canada", -12.478304862976074 ], [ "▁Destin", -12.478355407714844 ], [ "▁Knowing", -12.478411674499512 ], [ "▁retained", -12.478426933288574 ], [ "▁zinc", -12.478470802307129 ], [ "▁worrying", -12.478655815124512 ], [ "faţa", -12.478676795959473 ], [ "▁initi", -12.478837966918945 ], [ "ORI", -12.4788818359375 ], [ "▁refuz", -12.478921890258789 ], [ "bruch", -12.479202270507812 ], [ "▁impun", -12.479233741760254 ], [ "▁persoană", -12.479308128356934 ], [ "EAR", -12.479347229003906 ], [ "bedarf", -12.479368209838867 ], [ "▁Gebiet", -12.47940731048584 ], [ "▁Roof", -12.479436874389648 ], [ "▁negligence", -12.47957706451416 ], [ "security", -12.479618072509766 ], [ "▁accesorii", -12.479641914367676 ], [ "▁unclear", -12.479667663574219 ], [ "▁securitate", -12.479848861694336 ], [ "▁spotlight", -12.479896545410156 ], [ "▁speziell", -12.479923248291016 ], [ "▁mentally", -12.479942321777344 ], [ "▁preservation", -12.48011589050293 ], [ "▁Promotion", 
-12.480156898498535 ], [ "partnered", -12.480274200439453 ], [ "▁Hinter", -12.48031997680664 ], [ "▁punishment", -12.480359077453613 ], [ "▁grease", -12.480713844299316 ], [ "▁NW", -12.480714797973633 ], [ "▁curse", -12.480897903442383 ], [ "ckle", -12.48101806640625 ], [ "▁Hire", -12.481043815612793 ], [ "▁Whole", -12.481088638305664 ], [ "▁basse", -12.481289863586426 ], [ "▁DNS", -12.481427192687988 ], [ "flamm", -12.481560707092285 ], [ "▁scoop", -12.481574058532715 ], [ "Norm", -12.481663703918457 ], [ "▁Surgery", -12.481735229492188 ], [ "▁widget", -12.481741905212402 ], [ "connected", -12.481863021850586 ], [ "autorité", -12.481961250305176 ], [ "▁utilis", -12.482096672058105 ], [ "▁formă", -12.482185363769531 ], [ "▁clearing", -12.482307434082031 ], [ "▁jumătate", -12.482815742492676 ], [ "größe", -12.482831954956055 ], [ "▁Tief", -12.482852935791016 ], [ "épi", -12.482939720153809 ], [ "zunehmen", -12.483174324035645 ], [ "▁touchdown", -12.48318099975586 ], [ "▁scholarships", -12.483236312866211 ], [ "▁dementia", -12.483319282531738 ], [ "▁Jeder", -12.48333740234375 ], [ "▁nightmare", -12.483379364013672 ], [ "▁Raw", -12.48342514038086 ], [ "absorbed", -12.483468055725098 ], [ "lohnt", -12.483484268188477 ], [ "quent", -12.483580589294434 ], [ "interest", -12.483626365661621 ], [ "OSS", -12.483649253845215 ], [ "▁Leaf", -12.483667373657227 ], [ "▁timeless", -12.48381519317627 ], [ "DY", -12.483865737915039 ], [ "▁Remote", -12.483907699584961 ], [ "chner", -12.483938217163086 ], [ "▁Pam", -12.484014511108398 ], [ "urban", -12.484060287475586 ], [ "во", -12.484146118164062 ], [ "▁Kunde", -12.484166145324707 ], [ "▁Laptop", -12.484169006347656 ], [ "finder", -12.484336853027344 ], [ "▁Pole", -12.484567642211914 ], [ "2.8", -12.484588623046875 ], [ "finished", -12.484670639038086 ], [ "▁prophet", -12.484697341918945 ], [ "mailed", -12.484758377075195 ], [ "2-0", -12.4849214553833 ], [ "▁disciples", -12.484949111938477 ], [ "▁intriguing", -12.484980583190918 ], [ "IRA", -12.485033988952637 ], [ "petit", -12.485077857971191 ], [ "▁Membership", -12.485097885131836 ], [ "▁provincial", -12.485177040100098 ], [ "▁Prüfung", -12.485292434692383 ], [ "-50", -12.485450744628906 ], [ "▁cryptocurrency", -12.485522270202637 ], [ "▁journalism", -12.485536575317383 ], [ "▁Downtown", -12.485593795776367 ], [ "inserted", -12.485655784606934 ], [ "▁Direction", -12.485718727111816 ], [ "lipid", -12.485732078552246 ], [ "▁Sebastian", -12.485793113708496 ], [ "fordert", -12.48591136932373 ], [ "Originally", -12.485989570617676 ], [ "tipp", -12.486048698425293 ], [ "verantwortlich", -12.486064910888672 ], [ "▁wheelchair", -12.486085891723633 ], [ "▁structura", -12.48609733581543 ], [ "▁Danny", -12.486138343811035 ], [ "999", -12.486284255981445 ], [ "▁Schiff", -12.486380577087402 ], [ "formally", -12.486408233642578 ], [ "focused", -12.486428260803223 ], [ "▁Vater", -12.486478805541992 ], [ "▁Dear", -12.486599922180176 ], [ "▁reinforce", -12.486794471740723 ], [ "proprietar", -12.48690414428711 ], [ "▁Kyle", -12.487004280090332 ], [ "În", -12.487015724182129 ], [ "▁servir", -12.487268447875977 ], [ "length", -12.48730754852295 ], [ "▁showroom", -12.48735237121582 ], [ "reli", -12.487473487854004 ], [ "▁Brü", -12.487529754638672 ], [ "▁Schle", -12.487634658813477 ], [ "▁profond", -12.487773895263672 ], [ "▁Superior", -12.487826347351074 ], [ "▁lifted", -12.487844467163086 ], [ "highlighting", -12.487850189208984 ], [ "▁Connection", -12.48793888092041 ], [ "▁similarly", -12.487998962402344 ], [ "▁diferit", 
-12.488005638122559 ], [ "▁sweater", -12.488014221191406 ], [ "État", -12.48803997039795 ], [ "rooted", -12.488069534301758 ], [ "▁sleeves", -12.488236427307129 ], [ "де", -12.488264083862305 ], [ "▁Laboratory", -12.488265991210938 ], [ "ündig", -12.488719940185547 ], [ "▁Viking", -12.488741874694824 ], [ "▁Origin", -12.48878002166748 ], [ "▁vibr", -12.488812446594238 ], [ "199", -12.488974571228027 ], [ "▁yummy", -12.489001274108887 ], [ "STAR", -12.489140510559082 ], [ "▁repro", -12.489152908325195 ], [ "▁Kirchen", -12.489229202270508 ], [ "hopper", -12.48925495147705 ], [ "zza", -12.489335060119629 ], [ "▁vitesse", -12.48934555053711 ], [ "▁minimalist", -12.489412307739258 ], [ "▁Election", -12.489420890808105 ], [ "draw", -12.489501953125 ], [ "▁candles", -12.48959732055664 ], [ "▁Mund", -12.489615440368652 ], [ "urged", -12.489901542663574 ], [ "▁cânt", -12.489917755126953 ], [ "Ultimately", -12.49002742767334 ], [ "▁Lift", -12.490124702453613 ], [ "loaded", -12.490334510803223 ], [ "demand", -12.490508079528809 ], [ "▁aleg", -12.490621566772461 ], [ "▁Discovery", -12.490755081176758 ], [ "▁Vienna", -12.490960121154785 ], [ "▁Kategorie", -12.490961074829102 ], [ "▁Cotton", -12.490962028503418 ], [ "▁$200", -12.491043090820312 ], [ "▁Drei", -12.491052627563477 ], [ "▁reicht", -12.491168975830078 ], [ "speicher", -12.491231918334961 ], [ "▁Immobilien", -12.491483688354492 ], [ "gefühl", -12.491509437561035 ], [ "make", -12.491525650024414 ], [ "pell", -12.49155044555664 ], [ "▁dull", -12.491598129272461 ], [ "▁arbeitet", -12.491681098937988 ], [ "retaining", -12.491700172424316 ], [ "losen", -12.491707801818848 ], [ "match", -12.491876602172852 ], [ "-60", -12.491880416870117 ], [ "▁ecological", -12.492000579833984 ], [ "▁vend", -12.492051124572754 ], [ "▁grammar", -12.492061614990234 ], [ "▁1:1", -12.492225646972656 ], [ "grilled", -12.492279052734375 ], [ "geordnet", -12.492321014404297 ], [ "▁Pav", -12.49236011505127 ], [ "▁Depot", -12.492368698120117 ], [ "▁Walking", -12.492372512817383 ], [ "teamed", -12.492402076721191 ], [ "▁torque", -12.492537498474121 ], [ "▁Venture", -12.492659568786621 ], [ "▁beginner", -12.49269962310791 ], [ "▁Monaten", -12.492712020874023 ], [ "▁Pune", -12.493054389953613 ], [ "connect", -12.493075370788574 ], [ "▁textbook", -12.493132591247559 ], [ "▁unprecedented", -12.49314022064209 ], [ "▁implied", -12.493168830871582 ], [ "▁cubic", -12.493668556213379 ], [ "enthält", -12.493696212768555 ], [ "▁Brenn", -12.49388313293457 ], [ "▁Expect", -12.49394416809082 ], [ "▁lever", -12.4939603805542 ], [ "veux", -12.49399185180664 ], [ "▁Claire", -12.494112968444824 ], [ "Acc", -12.49432373046875 ], [ "▁Typ", -12.494478225708008 ], [ "▁smoothie", -12.494501113891602 ], [ "▁Idaho", -12.494780540466309 ], [ "▁spati", -12.494802474975586 ], [ "▁bénéficier", -12.49488353729248 ], [ "▁Kle", -12.495161056518555 ], [ "▁serviciilor", -12.495169639587402 ], [ "▁prohibit", -12.495267868041992 ], [ "EAD", -12.495417594909668 ], [ "▁Turner", -12.495418548583984 ], [ "▁elibera", -12.49543571472168 ], [ "▁payday", -12.495464324951172 ], [ "▁prolong", -12.495466232299805 ], [ "▁sued", -12.495481491088867 ], [ "▁Devil", -12.495536804199219 ], [ "▁Skills", -12.495552062988281 ], [ "▁Marcel", -12.495553970336914 ], [ "▁silhouette", -12.495601654052734 ], [ "▁preț", -12.495742797851562 ], [ "▁Gö", -12.495747566223145 ], [ "▁Creator", -12.495774269104004 ], [ "fed", -12.4959077835083 ], [ "Cap", -12.495997428894043 ], [ "▁dedicate", -12.496042251586914 ], [ "0000", 
-12.496124267578125 ], [ "▁VAT", -12.496259689331055 ], [ "▁Firefox", -12.496443748474121 ], [ "▁therapies", -12.496477127075195 ], [ "▁screws", -12.496662139892578 ], [ "▁Province", -12.496697425842285 ], [ "▁problematic", -12.496871948242188 ], [ "▁Vid", -12.496915817260742 ], [ "▁Lost", -12.496950149536133 ], [ "▁elegance", -12.497520446777344 ], [ "▁Elegant", -12.497525215148926 ], [ "ignant", -12.497573852539062 ], [ "▁darin", -12.497649192810059 ], [ "▁anonym", -12.497669219970703 ], [ "▁vegeta", -12.49767780303955 ], [ "incoming", -12.497762680053711 ], [ "▁pills", -12.497846603393555 ], [ "governing", -12.497893333435059 ], [ "▁Haven", -12.497920989990234 ], [ "paper", -12.497947692871094 ], [ "räume", -12.497979164123535 ], [ "paw", -12.498099327087402 ], [ "▁spelling", -12.498283386230469 ], [ "ambele", -12.498318672180176 ], [ "▁reprezentat", -12.498371124267578 ], [ "▁mâ", -12.49853515625 ], [ "wirtschaftliche", -12.498558044433594 ], [ "▁valabil", -12.498579025268555 ], [ "▁konkret", -12.498618125915527 ], [ "▁financier", -12.498619079589844 ], [ "▁irre", -12.499135971069336 ], [ "▁Silicon", -12.499171257019043 ], [ "Viv", -12.499181747436523 ], [ "▁viruses", -12.49927043914795 ], [ "▁CNN", -12.499324798583984 ], [ "▁erleben", -12.499482154846191 ], [ "gina", -12.499492645263672 ], [ "punctul", -12.49951457977295 ], [ "▁Sfânt", -12.499753952026367 ], [ "▁Manage", -12.499811172485352 ], [ "▁payable", -12.499984741210938 ], [ "▁practitioner", -12.500143051147461 ], [ "▁conférence", -12.50026798248291 ], [ "▁drought", -12.50027084350586 ], [ "▁devote", -12.500361442565918 ], [ "wertung", -12.500420570373535 ], [ "stabil", -12.5004301071167 ], [ "▁balcon", -12.500553131103516 ], [ "▁Lebensmittel", -12.500603675842285 ], [ "COL", -12.500950813293457 ], [ "▁Domnul", -12.501093864440918 ], [ "carved", -12.501359939575195 ], [ "▁preparat", -12.5014009475708 ], [ "101", -12.501537322998047 ], [ "▁specimen", -12.501580238342285 ], [ "urgeon", -12.501596450805664 ], [ "LIC", -12.50163459777832 ], [ "Plattform", -12.501643180847168 ], [ "▁ramas", -12.501739501953125 ], [ "▁copilului", -12.501791954040527 ], [ "bacter", -12.501812934875488 ], [ "körper", -12.501940727233887 ], [ "▁Kru", -12.501981735229492 ], [ "▁Employ", -12.502055168151855 ], [ "office", -12.502080917358398 ], [ "▁simmer", -12.502120018005371 ], [ "qualität", -12.502137184143066 ], [ "▁freshly", -12.502215385437012 ], [ "▁Nine", -12.50223159790039 ], [ "▁tonnes", -12.50223445892334 ], [ "boden", -12.502236366271973 ], [ "enquête", -12.50240707397461 ], [ "▁Colour", -12.502481460571289 ], [ "▁Diagram", -12.502495765686035 ], [ "▁gewählt", -12.502516746520996 ], [ "▁viitoare", -12.502538681030273 ], [ "▁reporters", -12.502913475036621 ], [ "guer", -12.502991676330566 ], [ "▁Kombination", -12.503021240234375 ], [ "▁qualitative", -12.50302505493164 ], [ "Centrul", -12.503131866455078 ], [ "avy", -12.503170013427734 ], [ "▁Eng", -12.503175735473633 ], [ "▁sufletul", -12.50327205657959 ], [ "▁germ", -12.503412246704102 ], [ "▁prevented", -12.503448486328125 ], [ "appelle", -12.503533363342285 ], [ "gins", -12.503556251525879 ], [ "▁Skype", -12.503585815429688 ], [ "conditioned", -12.503617286682129 ], [ "▁clutch", -12.503641128540039 ], [ "environ", -12.503694534301758 ], [ "3.3", -12.503774642944336 ], [ "▁webinar", -12.503866195678711 ], [ "▁forty", -12.504104614257812 ], [ "▁Medicaid", -12.504127502441406 ], [ "▁dismissed", -12.504167556762695 ], [ "▁siblings", -12.504168510437012 ], [ "▁Jaw", -12.504196166992188 ], [ 
"guiding", -12.504220962524414 ], [ "cigarette", -12.504374504089355 ], [ "▁Shah", -12.504681587219238 ], [ "▁Lehrer", -12.504684448242188 ], [ "▁muscular", -12.504694938659668 ], [ "spatele", -12.504796981811523 ], [ "▁réduction", -12.504836082458496 ], [ "▁fixes", -12.504851341247559 ], [ "Span", -12.50511646270752 ], [ "▁Hudson", -12.505231857299805 ], [ "development", -12.505250930786133 ], [ "▁excluded", -12.50525951385498 ], [ "Democrat", -12.505260467529297 ], [ "▁nominal", -12.505317687988281 ], [ "purpose", -12.50540828704834 ], [ "▁bored", -12.505500793457031 ], [ "espèce", -12.50550651550293 ], [ "▁(30", -12.5055570602417 ], [ "Neither", -12.505608558654785 ], [ "hänge", -12.505610466003418 ], [ "square", -12.505728721618652 ], [ "voller", -12.505736351013184 ], [ "▁pertinent", -12.505783081054688 ], [ "▁Wool", -12.50595474243164 ], [ "settling", -12.50607681274414 ], [ "fangen", -12.506148338317871 ], [ "▁Testing", -12.506152153015137 ], [ "distin", -12.506196022033691 ], [ "▁Marken", -12.506227493286133 ], [ "▁Beta", -12.506300926208496 ], [ "▁fulfilling", -12.506339073181152 ], [ "Leider", -12.506357192993164 ], [ "black", -12.506389617919922 ], [ "occupe", -12.50658893585205 ], [ "itățile", -12.506688117980957 ], [ "Pay", -12.506887435913086 ], [ "▁bandwidth", -12.506890296936035 ], [ "▁neighbourhood", -12.506918907165527 ], [ "▁Gutschein", -12.506922721862793 ], [ "degree", -12.507055282592773 ], [ "ivité", -12.507116317749023 ], [ "4.1", -12.507169723510742 ], [ "▁tätig", -12.507170677185059 ], [ "topic", -12.507242202758789 ], [ "ätz", -12.507243156433105 ], [ "these", -12.50733470916748 ], [ "▁propriété", -12.507438659667969 ], [ "▁innings", -12.507458686828613 ], [ "▁Prevention", -12.50754165649414 ], [ "▁Saw", -12.507585525512695 ], [ "▁opener", -12.507752418518066 ], [ "entwicklung", -12.507824897766113 ], [ "▁Johann", -12.507865905761719 ], [ "▁statistic", -12.507881164550781 ], [ "oids", -12.507966995239258 ], [ "▁Delaware", -12.508000373840332 ], [ "▁Isle", -12.508001327514648 ], [ "▁accompagn", -12.508028984069824 ], [ "▁Risiko", -12.508079528808594 ], [ "▁Conform", -12.508268356323242 ], [ "zeichnen", -12.508395195007324 ], [ "▁acuz", -12.508479118347168 ], [ "▁Mort", -12.508524894714355 ], [ "Fällen", -12.50853157043457 ], [ "▁blended", -12.50871467590332 ], [ "found", -12.50872802734375 ], [ "▁gestalten", -12.50874137878418 ], [ "▁Découvrez", -12.508830070495605 ], [ "▁Wett", -12.508956909179688 ], [ "▁débat", -12.508990287780762 ], [ "▁Tire", -12.509007453918457 ], [ "benz", -12.509037017822266 ], [ "Yes", -12.509074211120605 ], [ "▁pierde", -12.509110450744629 ], [ "▁niciodata", -12.509121894836426 ], [ "▁precipit", -12.509145736694336 ], [ "▁lazy", -12.509334564208984 ], [ "▁creature", -12.509370803833008 ], [ "Wettbewerb", -12.509385108947754 ], [ "▁Explo", -12.509496688842773 ], [ "wolf", -12.509657859802246 ], [ "▁conséquence", -12.509662628173828 ], [ "▁jewellery", -12.509662628173828 ], [ "▁Extension", -12.509735107421875 ], [ "▁transmitted", -12.509872436523438 ], [ "▁darker", -12.509973526000977 ], [ "▁simbol", -12.510065078735352 ], [ "kim", -12.510069847106934 ], [ "▁proteja", -12.510098457336426 ], [ "▁Copper", -12.510189056396484 ], [ "mitglied", -12.510218620300293 ], [ "▁explosive", -12.510222434997559 ], [ "▁Nicolae", -12.510223388671875 ], [ "▁intricate", -12.510231971740723 ], [ "lati", -12.510313034057617 ], [ "Mark", -12.510334014892578 ], [ "▁Porsche", -12.510339736938477 ], [ "▁Revenue", -12.510479927062988 ], [ "4.2", -12.510613441467285 
], [ "certain", -12.510836601257324 ], [ "▁Coaching", -12.510879516601562 ], [ "▁allocated", -12.510879516601562 ], [ "▁optimiz", -12.511017799377441 ], [ "▁heel", -12.511205673217773 ], [ "▁indigenous", -12.511330604553223 ], [ "▁vineri", -12.511396408081055 ], [ "▁Inspector", -12.51145076751709 ], [ "▁colleague", -12.5115327835083 ], [ "ANG", -12.511649131774902 ], [ "éducation", -12.511887550354004 ], [ "▁Geschenk", -12.51188850402832 ], [ "channel", -12.511899948120117 ], [ "▁trapped", -12.511954307556152 ], [ "BF", -12.511974334716797 ], [ "▁firing", -12.512086868286133 ], [ "▁chlor", -12.512103080749512 ], [ "▁Carlos", -12.512115478515625 ], [ "▁proxy", -12.512128829956055 ], [ "▁pinch", -12.512167930603027 ], [ "▁Pete", -12.512201309204102 ], [ "phospho", -12.512458801269531 ], [ "▁waiver", -12.51246452331543 ], [ "▁Croatia", -12.512480735778809 ], [ "▁behave", -12.51258373260498 ], [ "▁frig", -12.512676239013672 ], [ "▁Vorteil", -12.51279067993164 ], [ "▁wichtiger", -12.512837409973145 ], [ "........", -12.512929916381836 ], [ "▁flick", -12.513007164001465 ], [ "▁Stanford", -12.51306438446045 ], [ "öse", -12.513096809387207 ], [ "▁Fernseh", -12.513099670410156 ], [ "▁vélo", -12.51322078704834 ], [ "reisen", -12.513304710388184 ], [ "residing", -12.513504981994629 ], [ "▁Taste", -12.513580322265625 ], [ "▁disappeared", -12.513630867004395 ], [ "▁Hood", -12.513776779174805 ], [ "▁fabriqu", -12.514046669006348 ], [ "▁Jake", -12.514470100402832 ], [ "Lastly", -12.51462173461914 ], [ "▁furnace", -12.514673233032227 ], [ "▁Ottawa", -12.51473331451416 ], [ "▁dictate", -12.514742851257324 ], [ "zece", -12.514817237854004 ], [ "protect", -12.514932632446289 ], [ "FU", -12.51495361328125 ], [ "Stack", -12.514954566955566 ], [ "▁teilweise", -12.515018463134766 ], [ "▁Publisher", -12.51506233215332 ], [ "▁lutte", -12.515159606933594 ], [ "202", -12.515178680419922 ], [ "psy", -12.515190124511719 ], [ "▁wünschen", -12.515238761901855 ], [ "▁pathways", -12.515356063842773 ], [ "ivitate", -12.515559196472168 ], [ "▁continuă", -12.515658378601074 ], [ "ziemlich", -12.515791893005371 ], [ "verted", -12.515812873840332 ], [ "▁sequel", -12.515839576721191 ], [ "tinct", -12.51599407196045 ], [ "vette", -12.516020774841309 ], [ "▁exceeding", -12.516032218933105 ], [ "▁Yorkshire", -12.51607608795166 ], [ "▁cleanse", -12.51613998413086 ], [ "Sadly", -12.516159057617188 ], [ "▁präsentiert", -12.516164779663086 ], [ "angled", -12.516311645507812 ], [ "tude", -12.516339302062988 ], [ "chain", -12.516371726989746 ], [ "▁Oakland", -12.51639175415039 ], [ "xia", -12.516514778137207 ], [ "▁foremost", -12.51653003692627 ], [ "▁incomplete", -12.516786575317383 ], [ "▁restriction", -12.516905784606934 ], [ "▁whatsoever", -12.516908645629883 ], [ "▁shipment", -12.517017364501953 ], [ "**", -12.517059326171875 ], [ "Aici", -12.517110824584961 ], [ "PART", -12.517247200012207 ], [ "▁grams", -12.517251014709473 ], [ "▁Folk", -12.517457008361816 ], [ "▁encryption", -12.517467498779297 ], [ "▁Alfred", -12.517748832702637 ], [ "▁Veränderung", -12.517749786376953 ], [ "▁privately", -12.517817497253418 ], [ "£", -12.517909049987793 ], [ "▁Sonne", -12.51799201965332 ], [ "kow", -12.518117904663086 ], [ "▁CBS", -12.518172264099121 ], [ "▁Feuer", -12.518198013305664 ], [ "▁crushed", -12.518230438232422 ], [ "▁cazare", -12.518270492553711 ], [ "▁beraten", -12.518401145935059 ], [ "envoi", -12.518423080444336 ], [ "▁genannt", -12.51843547821045 ], [ "▁Lok", -12.518472671508789 ], [ "nox", -12.518569946289062 ], [ "wishing", 
-12.518759727478027 ], [ "▁freak", -12.518759727478027 ], [ "rasi", -12.51879596710205 ], [ "▁calculations", -12.518888473510742 ], [ "▁sprechen", -12.51890754699707 ], [ "5:00", -12.519062042236328 ], [ "▁Gam", -12.519074440002441 ], [ "▁invasion", -12.519159317016602 ], [ "ZA", -12.519230842590332 ], [ "aiming", -12.519327163696289 ], [ "▁näher", -12.519404411315918 ], [ "▁Maßnahmen", -12.519433975219727 ], [ "▁măsură", -12.519490242004395 ], [ "▁Bestellung", -12.519610404968262 ], [ "▁gown", -12.519665718078613 ], [ "▁oblige", -12.519747734069824 ], [ "länder", -12.51977825164795 ], [ "posi", -12.519853591918945 ], [ "▁Earn", -12.51988410949707 ], [ "▁dubl", -12.51999282836914 ], [ "▁sticky", -12.520100593566895 ], [ "▁litter", -12.520181655883789 ], [ "▁Salz", -12.520257949829102 ], [ "▁Matter", -12.520272254943848 ], [ "▁Driving", -12.520275115966797 ], [ "▁pursu", -12.520285606384277 ], [ "ographer", -12.520390510559082 ], [ "▁touring", -12.520400047302246 ], [ "opter", -12.520444869995117 ], [ "▁fierce", -12.520475387573242 ], [ "▁Audit", -12.520480155944824 ], [ "▁imperi", -12.520755767822266 ], [ "▁positiv", -12.520780563354492 ], [ "règles", -12.520849227905273 ], [ "▁bouton", -12.520990371704102 ], [ "▁victorie", -12.520990371704102 ], [ "▁manuel", -12.521015167236328 ], [ "▁await", -12.52103042602539 ], [ "▁transformer", -12.521041870117188 ], [ "▁cupboard", -12.52108383178711 ], [ "▁Hag", -12.521117210388184 ], [ "naj", -12.521214485168457 ], [ "▁annoncé", -12.52139663696289 ], [ "▁scolaire", -12.521401405334473 ], [ "▁étape", -12.521482467651367 ], [ "▁pirate", -12.521761894226074 ], [ "▁Rated", -12.521794319152832 ], [ "LOT", -12.521846771240234 ], [ "▁natura", -12.521944046020508 ], [ "oga", -12.522336959838867 ], [ "Read", -12.522388458251953 ], [ "idio", -12.522444725036621 ], [ "▁recession", -12.522698402404785 ], [ "veţi", -12.522761344909668 ], [ "▁blossom", -12.523082733154297 ], [ "▁lunar", -12.523141860961914 ], [ "▁inhibit", -12.52316951751709 ], [ "gemein", -12.523219108581543 ], [ "▁Historic", -12.523262023925781 ], [ "▁HTTP", -12.523370742797852 ], [ "misiune", -12.5234956741333 ], [ "▁Manda", -12.523601531982422 ], [ "▁Hurricane", -12.523643493652344 ], [ "Strat", -12.523646354675293 ], [ "▁populaire", -12.523756980895996 ], [ "▁useless", -12.523762702941895 ], [ "▁Leipzig", -12.523924827575684 ], [ "▁Krankheit", -12.52392578125 ], [ "▁Bonne", -12.52397346496582 ], [ "▁tissu", -12.52399730682373 ], [ "▁Baum", -12.523998260498047 ], [ "▁BUT", -12.524152755737305 ], [ "▁Mondial", -12.52423095703125 ], [ "▁triangle", -12.524242401123047 ], [ "▁Tesla", -12.524250984191895 ], [ "▁pământ", -12.52430534362793 ], [ "▁aminte", -12.524726867675781 ], [ "▁vehicul", -12.524770736694336 ], [ "▁cerut", -12.52482795715332 ], [ "▁respiratory", -12.524836540222168 ], [ "▁rayon", -12.524993896484375 ], [ "▁gestaltet", -12.525067329406738 ], [ "310", -12.525139808654785 ], [ "pfl", -12.525239944458008 ], [ "▁shrimp", -12.525337219238281 ], [ "▁reconnu", -12.525409698486328 ], [ "ologique", -12.525476455688477 ], [ "▁unity", -12.525674819946289 ], [ "Speicher", -12.52569580078125 ], [ "▁Movement", -12.525794982910156 ], [ "ddling", -12.52581787109375 ], [ "OE", -12.525818824768066 ], [ "▁Resolution", -12.525863647460938 ], [ "esteem", -12.525898933410645 ], [ "▁Teen", -12.526288986206055 ], [ "▁believing", -12.526463508605957 ], [ "▁Tipps", -12.526481628417969 ], [ "jpg", -12.526494026184082 ], [ "▁obs", -12.526519775390625 ], [ "SHA", -12.526702880859375 ], [ "▁quietly", 
-12.526907920837402 ], [ "setting", -12.52712345123291 ], [ "▁elevator", -12.527185440063477 ], [ "phor", -12.527194023132324 ], [ "Just", -12.52725887298584 ], [ "▁legatura", -12.52739143371582 ], [ "elected", -12.527414321899414 ], [ "▁disclosed", -12.527419090270996 ], [ "quarter", -12.52743148803711 ], [ "zzy", -12.527461051940918 ], [ "▁gata", -12.527491569519043 ], [ "SAN", -12.527532577514648 ], [ "▁Cathedral", -12.527592658996582 ], [ "192", -12.527656555175781 ], [ "▁RBI", -12.527726173400879 ], [ "▁Seller", -12.527798652648926 ], [ "▁urine", -12.527807235717773 ], [ "▁Hardware", -12.527966499328613 ], [ "▁steadi", -12.527993202209473 ], [ "percussion", -12.528158187866211 ], [ "▁francez", -12.528172492980957 ], [ "▁rude", -12.528202056884766 ], [ "bod", -12.528223037719727 ], [ "cession", -12.528249740600586 ], [ "▁HTC", -12.528372764587402 ], [ "HB", -12.528576850891113 ], [ "▁descent", -12.528644561767578 ], [ "▁Painting", -12.528681755065918 ], [ "119", -12.528684616088867 ], [ "sagen", -12.52877426147461 ], [ "▁salvation", -12.52880573272705 ], [ "arro", -12.528814315795898 ], [ "0.3", -12.52886962890625 ], [ "▁Duck", -12.52890396118164 ], [ "Mit", -12.529052734375 ], [ "да", -12.52927017211914 ], [ "▁Diesel", -12.529322624206543 ], [ "▁Medal", -12.529413223266602 ], [ "▁interim", -12.529439926147461 ], [ "▁montagne", -12.529439926147461 ], [ "▁Pixel", -12.529631614685059 ], [ "LINE", -12.529806137084961 ], [ "▁dureri", -12.529938697814941 ], [ "▁Bengal", -12.529990196228027 ], [ "Legea", -12.530080795288086 ], [ "▁Strecke", -12.530094146728516 ], [ "▁schneller", -12.53012752532959 ], [ "▁Karten", -12.5301513671875 ], [ "cion", -12.530241966247559 ], [ "▁Coco", -12.53037166595459 ], [ "troisième", -12.53052806854248 ], [ "401", -12.530616760253906 ], [ "▁sandwiches", -12.530704498291016 ], [ "▁folosind", -12.530920028686523 ], [ "▁Folgen", -12.530953407287598 ], [ "▁triumph", -12.530991554260254 ], [ "▁Hintergrund", -12.530996322631836 ], [ "▁revelation", -12.531084060668945 ], [ "ôme", -12.531222343444824 ], [ "▁Nex", -12.531245231628418 ], [ "jährigen", -12.531295776367188 ], [ "▁militant", -12.531296730041504 ], [ "▁fabricant", -12.531671524047852 ], [ "iano", -12.531713485717773 ], [ "▁formulation", -12.53188705444336 ], [ "integrating", -12.532050132751465 ], [ "▁Items", -12.532142639160156 ], [ "▁contractual", -12.532320976257324 ], [ "AIDS", -12.532424926757812 ], [ "▁pitcher", -12.532610893249512 ], [ "▁Snap", -12.532623291015625 ], [ "▁systematic", -12.532663345336914 ], [ "▁referendum", -12.532694816589355 ], [ "gau", -12.53281021118164 ], [ "administration", -12.532917022705078 ], [ "▁speci", -12.532981872558594 ], [ "ieni", -12.532998085021973 ], [ "prox", -12.533186912536621 ], [ "▁bouquet", -12.533241271972656 ], [ "▁sinnvoll", -12.533270835876465 ], [ "▁Fleisch", -12.533309936523438 ], [ "ktuell", -12.533381462097168 ], [ "▁mushrooms", -12.533408164978027 ], [ "▁Straf", -12.533470153808594 ], [ "▁cresc", -12.533491134643555 ], [ "TEM", -12.533502578735352 ], [ "▁vindec", -12.53352165222168 ], [ "▁Drama", -12.533540725708008 ], [ "chief", -12.533550262451172 ], [ "▁müsst", -12.533614158630371 ], [ "▁Warner", -12.533662796020508 ], [ "118", -12.533761024475098 ], [ "▁saptamana", -12.533831596374512 ], [ "▁animaux", -12.53412914276123 ], [ "▁Directory", -12.534146308898926 ], [ "▁entgegen", -12.53415584564209 ], [ "▁deduction", -12.534156799316406 ], [ "▁Strategic", -12.53426456451416 ], [ "▁rats", -12.534419059753418 ], [ "▁Moses", -12.534448623657227 ], [ "eko", 
-12.534564971923828 ], [ "strict", -12.534590721130371 ], [ "▁Ashley", -12.534603118896484 ], [ "mik", -12.534622192382812 ], [ "▁relocate", -12.534668922424316 ], [ "▁whip", -12.534738540649414 ], [ "central", -12.534750938415527 ], [ "mack", -12.534892082214355 ], [ "stufe", -12.534961700439453 ], [ "▁Metropolitan", -12.5349702835083 ], [ "▁croissance", -12.534974098205566 ], [ "▁celebrities", -12.535021781921387 ], [ "▁Geh", -12.53507137298584 ], [ "▁verifica", -12.535196304321289 ], [ "▁satisfac", -12.535211563110352 ], [ "▁Julian", -12.535271644592285 ], [ "▁remotely", -12.535432815551758 ], [ "▁Safari", -12.535542488098145 ], [ "▁Chic", -12.53557014465332 ], [ "▁clamp", -12.535818099975586 ], [ "▁Schnee", -12.535918235778809 ], [ "grown", -12.536069869995117 ], [ "▁Character", -12.536110877990723 ], [ "▁charities", -12.536137580871582 ], [ "Thankfully", -12.536625862121582 ], [ "▁țară", -12.53681468963623 ], [ "IZ", -12.536816596984863 ], [ "Vielleicht", -12.536999702453613 ], [ "▁Pon", -12.537108421325684 ], [ "gegen", -12.53711986541748 ], [ "chez", -12.537185668945312 ], [ "Black", -12.537544250488281 ], [ "▁alimentare", -12.537555694580078 ], [ "▁verloren", -12.537562370300293 ], [ "▁predictions", -12.537657737731934 ], [ "Founded", -12.53795337677002 ], [ "▁femeie", -12.538022994995117 ], [ "wahrscheinlich", -12.538107872009277 ], [ "▁squeeze", -12.53819465637207 ], [ "▁verfügbar", -12.538259506225586 ], [ "▁hygiene", -12.538393020629883 ], [ "voire", -12.538667678833008 ], [ "▁birou", -12.538901329040527 ], [ "▁initiate", -12.538921356201172 ], [ "▁Patriot", -12.539009094238281 ], [ "▁Income", -12.539159774780273 ], [ "▁marry", -12.539310455322266 ], [ "lokal", -12.539336204528809 ], [ "logic", -12.53940486907959 ], [ "▁Abstract", -12.53966236114502 ], [ "▁grundsätzlich", -12.539822578430176 ], [ "▁tariff", -12.539886474609375 ], [ "▁definitiv", -12.539892196655273 ], [ "paz", -12.53989315032959 ], [ "Result", -12.539921760559082 ], [ "1:30", -12.54005241394043 ], [ "▁Latest", -12.540075302124023 ], [ "▁Dauer", -12.540155410766602 ], [ "Med", -12.540275573730469 ], [ "gewicht", -12.540348052978516 ], [ "▁Gaza", -12.540430068969727 ], [ "▁Newton", -12.540769577026367 ], [ "Dokument", -12.540897369384766 ], [ "formular", -12.540945053100586 ], [ "ILE", -12.540964126586914 ], [ "▁surse", -12.541040420532227 ], [ "MH", -12.54116153717041 ], [ "▁Arctic", -12.541255950927734 ], [ "▁ISBN", -12.541274070739746 ], [ "▁quarterback", -12.541315078735352 ], [ "▁absurd", -12.541555404663086 ], [ "▁Zusammenhang", -12.541561126708984 ], [ "▁Module", -12.54156494140625 ], [ "mented", -12.541667938232422 ], [ "worthy", -12.541797637939453 ], [ "▁célèbre", -12.541828155517578 ], [ "▁maritime", -12.541836738586426 ], [ "▁Reed", -12.541938781738281 ], [ "▁threaten", -12.542037010192871 ], [ "▁Satz", -12.542095184326172 ], [ "▁sticking", -12.542203903198242 ], [ "▁transcript", -12.542372703552246 ], [ "▁Morgen", -12.542425155639648 ], [ "▁Förder", -12.542435646057129 ], [ "▁Gottes", -12.542572021484375 ], [ "▁Coordinator", -12.542648315429688 ], [ "LOG", -12.54265022277832 ], [ "EAN", -12.542677879333496 ], [ "▁préparation", -12.54273509979248 ], [ "▁Brass", -12.542799949645996 ], [ "Așa", -12.542853355407715 ], [ "▁Utiliz", -12.54294490814209 ], [ "framed", -12.542973518371582 ], [ "▁asphalt", -12.543050765991211 ], [ "116", -12.543061256408691 ], [ "▁historically", -12.54310417175293 ], [ "▁doamn", -12.543176651000977 ], [ "Air", -12.543293952941895 ], [ "▁economist", -12.543838500976562 ], [ 
"fresh", -12.54384994506836 ], [ "engine", -12.543906211853027 ], [ "▁Rücken", -12.543919563293457 ], [ "▁worthwhile", -12.544124603271484 ], [ "▁Therapie", -12.544140815734863 ], [ "▁Joshua", -12.544151306152344 ], [ "sicherheit", -12.544175148010254 ], [ "▁scena", -12.544254302978516 ], [ "ifiant", -12.54433822631836 ], [ "/20", -12.54442024230957 ], [ "fehl", -12.544469833374023 ], [ "karten", -12.544515609741211 ], [ "501", -12.544656753540039 ], [ "▁vide", -12.544673919677734 ], [ "▁miliarde", -12.544699668884277 ], [ "▁trillion", -12.54470157623291 ], [ "oudre", -12.544761657714844 ], [ "nderung", -12.544803619384766 ], [ "▁inquiries", -12.544992446899414 ], [ "▁echipe", -12.545034408569336 ], [ "▁investiga", -12.545040130615234 ], [ "▁detailing", -12.545042991638184 ], [ "VIS", -12.545086860656738 ], [ "▁geographical", -12.545157432556152 ], [ "▁authentication", -12.54519271850586 ], [ "▁Schwa", -12.545201301574707 ], [ "▁Scri", -12.545230865478516 ], [ "▁discourage", -12.54527473449707 ], [ "Pass", -12.54529094696045 ], [ "▁scattered", -12.54529857635498 ], [ "▁langsam", -12.545300483703613 ], [ "telles", -12.545380592346191 ], [ "▁ramane", -12.5454740524292 ], [ "▁inhibitor", -12.545486450195312 ], [ "▁Habit", -12.54556941986084 ], [ "▁10:00", -12.545577049255371 ], [ "▁rezultat", -12.545595169067383 ], [ "äck", -12.545943260192871 ], [ ",000.", -12.545979499816895 ], [ "▁remedies", -12.546103477478027 ], [ "▁comportament", -12.546195983886719 ], [ "namen", -12.546229362487793 ], [ "▁ -12.546327590942383 ], [ "enstein", -12.546493530273438 ], [ "▁relevance", -12.546516418457031 ], [ "▁présentation", -12.54655933380127 ], [ "MHz", -12.546648979187012 ], [ "EMA", -12.546661376953125 ], [ "▁palace", -12.546709060668945 ], [ "▁vizibil", -12.546723365783691 ], [ "▁griev", -12.546820640563965 ], [ "▁severely", -12.54688549041748 ], [ "expert", -12.546942710876465 ], [ "▁ravi", -12.54696273803711 ], [ "▁feasible", -12.547002792358398 ], [ "▁Wholesale", -12.547009468078613 ], [ "▁graduat", -12.547077178955078 ], [ "Kü", -12.547094345092773 ], [ "▁quotation", -12.547157287597656 ], [ "/11", -12.54716968536377 ], [ "lutter", -12.547415733337402 ], [ "▁dice", -12.547467231750488 ], [ "modal", -12.547749519348145 ], [ "ggling", -12.547819137573242 ], [ "▁considér", -12.547986030578613 ], [ "▁Insel", -12.548097610473633 ], [ "▁Database", -12.5483980178833 ], [ "icism", -12.548508644104004 ], [ "▁quarterly", -12.54851245880127 ], [ "▁formule", -12.548558235168457 ], [ "▁renouvel", -12.54873275756836 ], [ "▁Treasure", -12.548737525939941 ], [ "▁1962", -12.548844337463379 ], [ "▁republic", -12.549111366271973 ], [ "▁États", -12.549254417419434 ], [ "▁salut", -12.549356460571289 ], [ "HK", -12.54941463470459 ], [ "▁Bali", -12.549427032470703 ], [ "▁Rechnung", -12.549447059631348 ], [ "fruit", -12.54945182800293 ], [ "lays", -12.549467086791992 ], [ "LAS", -12.54951000213623 ], [ "inclin", -12.549708366394043 ], [ "▁Cré", -12.549813270568848 ], [ "▁compt", -12.54985237121582 ], [ "țiilor", -12.550056457519531 ], [ "heft", -12.550111770629883 ], [ "▁Comisi", -12.55024242401123 ], [ "▁Nurse", -12.550516128540039 ], [ "loid", -12.550540924072266 ], [ "grove", -12.550761222839355 ], [ "▁Copy", -12.550867080688477 ], [ "▁Kampf", -12.550873756408691 ], [ "izată", -12.550945281982422 ], [ "würdig", -12.551244735717773 ], [ "-2018", -12.551305770874023 ], [ "ozo", -12.551350593566895 ], [ "▁integriert", -12.551397323608398 ], [ "▁réunion", -12.551448822021484 ], [ "▁mică", -12.551520347595215 ], [ "▁Chau", 
-12.551595687866211 ], [ "▁allegations", -12.551626205444336 ], [ "▁shaping", -12.551640510559082 ], [ "▁transcription", -12.551671981811523 ], [ "▁Monica", -12.551711082458496 ], [ "▁torture", -12.551795959472656 ], [ "▁cooperative", -12.551962852478027 ], [ "▁invité", -12.551987648010254 ], [ "▁bamboo", -12.552204132080078 ], [ "▁Thinking", -12.55232048034668 ], [ "▁gratis", -12.552392959594727 ], [ "117", -12.55267333984375 ], [ "renz", -12.55279541015625 ], [ "▁Fußball", -12.552823066711426 ], [ "▁Gram", -12.552873611450195 ], [ "sprung", -12.55290412902832 ], [ "▁Schluss", -12.55308723449707 ], [ "▁Diploma", -12.553345680236816 ], [ "▁apparatus", -12.553363800048828 ], [ "notably", -12.553483963012695 ], [ "▁exercit", -12.553532600402832 ], [ "ământ", -12.553536415100098 ], [ "▁masses", -12.553610801696777 ], [ "▁preuve", -12.553642272949219 ], [ "great", -12.553754806518555 ], [ "▁Drink", -12.553792953491211 ], [ "islam", -12.553828239440918 ], [ "ARM", -12.553914070129395 ], [ "indre", -12.554404258728027 ], [ "DW", -12.554410934448242 ], [ "▁Flowers", -12.554500579833984 ], [ "▁pill", -12.554574966430664 ], [ "▁objectifs", -12.554594039916992 ], [ "▁Bezug", -12.554659843444824 ], [ "▁assumptions", -12.55466365814209 ], [ "▁vesti", -12.554742813110352 ], [ "route", -12.554783821105957 ], [ "▁Bangkok", -12.554815292358398 ], [ "▁seamlessly", -12.55482006072998 ], [ "config", -12.554882049560547 ], [ "▁username", -12.554890632629395 ], [ "unsure", -12.555024147033691 ], [ "▁poser", -12.555129051208496 ], [ "▁impozit", -12.555246353149414 ], [ "▁metode", -12.555333137512207 ], [ "defending", -12.555347442626953 ], [ "▁Nic", -12.555431365966797 ], [ "▁Vertrag", -12.555508613586426 ], [ "▁plăcut", -12.55552864074707 ], [ "▁Pou", -12.555675506591797 ], [ "UCH", -12.555785179138184 ], [ "▁Fein", -12.555903434753418 ], [ "reading", -12.555994987487793 ], [ "snip", -12.55604076385498 ], [ "▁Livre", -12.556401252746582 ], [ "lander", -12.556509971618652 ], [ "▁hydraulic", -12.556559562683105 ], [ "veiled", -12.556563377380371 ], [ "intr", -12.556609153747559 ], [ "▁Domnului", -12.556641578674316 ], [ "▁$0.", -12.556713104248047 ], [ "▁kilometers", -12.556753158569336 ], [ "spann", -12.556870460510254 ], [ "▁credibility", -12.556892395019531 ], [ "▁eBook", -12.556953430175781 ], [ "VERY", -12.556994438171387 ], [ "▁Charm", -12.557122230529785 ], [ "Evangeli", -12.557193756103516 ], [ "▁anderer", -12.557193756103516 ], [ "▁Entry", -12.557195663452148 ], [ "ffy", -12.5573148727417 ], [ "▁Exc", -12.55737018585205 ], [ "▁Omega", -12.557446479797363 ], [ "▁Funktionen", -12.557455062866211 ], [ "▁Gay", -12.55752182006836 ], [ "▁acht", -12.557608604431152 ], [ "colored", -12.557615280151367 ], [ "itude", -12.557634353637695 ], [ "▁accompagné", -12.557645797729492 ], [ "▁unfortunate", -12.557981491088867 ], [ "▁DIN", -12.558091163635254 ], [ "▁installment", -12.558252334594727 ], [ "▁indépendant", -12.558307647705078 ], [ "These", -12.558364868164062 ], [ "mitten", -12.558394432067871 ], [ "thank", -12.558470726013184 ], [ "▁Trek", -12.558721542358398 ], [ "üchte", -12.55874252319336 ], [ "▁cuir", -12.55875015258789 ], [ "▁turbo", -12.558802604675293 ], [ "Table", -12.558847427368164 ], [ "▁Extrem", -12.558866500854492 ], [ "▁advertisements", -12.55915355682373 ], [ "▁chaîne", -12.559206008911133 ], [ "▁corridor", -12.559473991394043 ], [ "▁râ", -12.559651374816895 ], [ "▁Opening", -12.559718132019043 ], [ "Get", -12.559747695922852 ], [ "▁storytelling", -12.55976676940918 ], [ "▁severity", 
-12.559771537780762 ], [ "4\"", -12.559956550598145 ], [ "▁parasit", -12.559967994689941 ], [ "angebot", -12.56002426147461 ], [ "Data", -12.56005573272705 ], [ "listen", -12.560086250305176 ], [ "▁vârstă", -12.560094833374023 ], [ "▁swallow", -12.56025505065918 ], [ "TRE", -12.560321807861328 ], [ "▁daunting", -12.56035041809082 ], [ "▁Oli", -12.560481071472168 ], [ "▁definitive", -12.56066608428955 ], [ "▁rezerva", -12.560667037963867 ], [ "/15", -12.560807228088379 ], [ "▁Landschaft", -12.560887336730957 ], [ "▁Automotive", -12.560934066772461 ], [ "▁convers", -12.56113052368164 ], [ "▁thru", -12.561139106750488 ], [ "▁Township", -12.561140060424805 ], [ "▁tilt", -12.56119441986084 ], [ "▁Criminal", -12.561227798461914 ], [ "riez", -12.561407089233398 ], [ "▁Parking", -12.561440467834473 ], [ "▁humanitarian", -12.561518669128418 ], [ "▁Kilometer", -12.561529159545898 ], [ "controlled", -12.56189250946045 ], [ "▁Klick", -12.561910629272461 ], [ "support", -12.56199836730957 ], [ "handed", -12.562005996704102 ], [ "ämtliche", -12.562104225158691 ], [ "access", -12.562232971191406 ], [ "▁eleven", -12.562232971191406 ], [ "▁ferry", -12.56229305267334 ], [ "zieren", -12.562620162963867 ], [ "▁Gebrauch", -12.562688827514648 ], [ "▁vigoare", -12.562689781188965 ], [ "MON", -12.562756538391113 ], [ "fox", -12.562886238098145 ], [ "bestimmten", -12.562894821166992 ], [ "▁Gur", -12.563069343566895 ], [ "▁Mannschaft", -12.563146591186523 ], [ "▁patrol", -12.563173294067383 ], [ "▁casă", -12.563376426696777 ], [ "▁Stories", -12.563380241394043 ], [ "▁robotic", -12.563425064086914 ], [ "tiri", -12.563576698303223 ], [ "gewiesen", -12.5636568069458 ], [ "CV", -12.563722610473633 ], [ "▁parinti", -12.563899040222168 ], [ "▁Owen", -12.563931465148926 ], [ "▁Katie", -12.564116477966309 ], [ "▁Combine", -12.56422233581543 ], [ "enfalls", -12.56442928314209 ], [ "▁financière", -12.564447402954102 ], [ "▁parliament", -12.564549446105957 ], [ "▁Weekend", -12.564616203308105 ], [ "▁Sonic", -12.564757347106934 ], [ "▁fixture", -12.56479263305664 ], [ "majorité", -12.56497573852539 ], [ "▁gravel", -12.565028190612793 ], [ "realizate", -12.565109252929688 ], [ "examining", -12.565113067626953 ], [ "▁grim", -12.5653657913208 ], [ "▁stabili", -12.565458297729492 ], [ "▁Wochenende", -12.56551456451416 ], [ "▁Hebrew", -12.565597534179688 ], [ "▁Harrison", -12.565799713134766 ], [ "▁boundary", -12.565858840942383 ], [ "40,000", -12.565902709960938 ], [ "▁Ambassador", -12.566208839416504 ], [ "▁scoate", -12.566229820251465 ], [ "ffin", -12.56623363494873 ], [ "▁crème", -12.566269874572754 ], [ "▁obiecte", -12.566378593444824 ], [ "enţa", -12.566763877868652 ], [ "▁subsidiary", -12.566797256469727 ], [ "▁Franco", -12.56688404083252 ], [ "▁visuel", -12.567042350769043 ], [ "▁uitat", -12.56708812713623 ], [ "▁revisit", -12.567122459411621 ], [ "▁Camping", -12.567150115966797 ], [ "▁Divine", -12.567304611206055 ], [ "4-6", -12.567323684692383 ], [ "▁Brandon", -12.567378997802734 ], [ "ма", -12.567450523376465 ], [ "sofern", -12.56745433807373 ], [ "ntweder", -12.56748104095459 ], [ "▁Shoot", -12.567618370056152 ], [ "étais", -12.56771183013916 ], [ "SPEC", -12.567930221557617 ], [ "▁dreapta", -12.567973136901855 ], [ "▁repaired", -12.568055152893066 ], [ "pyr", -12.568136215209961 ], [ "▁warranties", -12.568175315856934 ], [ "▁représent", -12.568263053894043 ], [ "ADE", -12.568293571472168 ], [ "▁selective", -12.56836223602295 ], [ "▁Banking", -12.568441390991211 ], [ "▁ergonomic", -12.568562507629395 ], [ "...”", 
-12.568602561950684 ], [ "▁willingness", -12.56867790222168 ], [ "isser", -12.568784713745117 ], [ "▁confection", -12.568961143493652 ], [ "admi", -12.569009780883789 ], [ "▁Freizeit", -12.569023132324219 ], [ "▁illuminate", -12.569151878356934 ], [ "▁Repeat", -12.569170951843262 ], [ "▁Zeitpunkt", -12.56933879852295 ], [ "claimed", -12.569439888000488 ], [ "▁erhältlich", -12.569480895996094 ], [ "▁paysage", -12.569537162780762 ], [ "▁Atom", -12.569890022277832 ], [ "▁Graf", -12.570086479187012 ], [ "▁firmware", -12.570093154907227 ], [ "▁Swift", -12.570180892944336 ], [ "▁cercetare", -12.57018756866455 ], [ "▁internațional", -12.570330619812012 ], [ "▁zombie", -12.570330619812012 ], [ "▁Spread", -12.57050609588623 ], [ "ECO", -12.57056999206543 ], [ "▁Gestaltung", -12.570758819580078 ], [ "rast", -12.570858001708984 ], [ "▁perfume", -12.5709228515625 ], [ "▁roulette", -12.570924758911133 ], [ "▁distill", -12.57096004486084 ], [ "▁Produkten", -12.570992469787598 ], [ "225", -12.571310043334961 ], [ "facing", -12.571371078491211 ], [ "▁paradigm", -12.571514129638672 ], [ "▁Rah", -12.571532249450684 ], [ "▁Renault", -12.571846961975098 ], [ "willig", -12.571864128112793 ], [ "▁Vet", -12.571890830993652 ], [ "▁reprezenta", -12.572126388549805 ], [ "stoß", -12.572185516357422 ], [ "▁Weiß", -12.5722074508667 ], [ "▁Solo", -12.572210311889648 ], [ "▁Jin", -12.572646141052246 ], [ "▁Brussels", -12.572693824768066 ], [ "▁Tournament", -12.572693824768066 ], [ "▁proced", -12.572710037231445 ], [ "▁Rabbi", -12.572835922241211 ], [ "▁gameplay", -12.572851181030273 ], [ "▁ATM", -12.572901725769043 ], [ "▁firearm", -12.572906494140625 ], [ "revealing", -12.573003768920898 ], [ "schütz", -12.57310676574707 ], [ "▁Absolutely", -12.573288917541504 ], [ "▁interference", -12.573433876037598 ], [ "▁Employment", -12.573558807373047 ], [ "▁chord", -12.57356071472168 ], [ "▁oportun", -12.573585510253906 ], [ "▁frontier", -12.573770523071289 ], [ "▁Lunch", -12.573891639709473 ], [ "bread", -12.57397174835205 ], [ "▁rendered", -12.573976516723633 ], [ "5.1", -12.573984146118164 ], [ "▁motif", -12.574066162109375 ], [ "▁Schlag", -12.574227333068848 ], [ "113", -12.574264526367188 ], [ "▁Deux", -12.574288368225098 ], [ "▁surplus", -12.574309349060059 ], [ "ALS", -12.574417114257812 ], [ "▁abortion", -12.574472427368164 ], [ "▁airplane", -12.574475288391113 ], [ "▁migrants", -12.574501991271973 ], [ "kli", -12.574539184570312 ], [ "▁crochet", -12.57454776763916 ], [ "fahrer", -12.574671745300293 ], [ "▁reconstruction", -12.57471752166748 ], [ "▁difer", -12.574752807617188 ], [ "▁Conserv", -12.57478141784668 ], [ "▁NSW", -12.57479476928711 ], [ "▁regim", -12.574844360351562 ], [ "▁Except", -12.574904441833496 ], [ "▁trage", -12.574978828430176 ], [ "▁Consiliul", -12.575058937072754 ], [ "▁Bedarf", -12.575064659118652 ], [ "▁additive", -12.5750732421875 ], [ "know", -12.5751371383667 ], [ "▁sauna", -12.57517147064209 ], [ "▁mortality", -12.575201034545898 ], [ "kräftig", -12.575358390808105 ], [ "▁Own", -12.575445175170898 ], [ "nzo", -12.575519561767578 ], [ "▁villes", -12.575543403625488 ], [ "▁recette", -12.575749397277832 ], [ "▁attacking", -12.575799942016602 ], [ "beruf", -12.57608699798584 ], [ "▁integrat", -12.57612419128418 ], [ "realizarea", -12.576201438903809 ], [ "▁exemption", -12.57628345489502 ], [ "GW", -12.576285362243652 ], [ "▁Nano", -12.576395034790039 ], [ "SCH", -12.576440811157227 ], [ "▁honesty", -12.576457023620605 ], [ "▁Arriv", -12.576515197753906 ], [ "▁gland", -12.576542854309082 ], [ 
"▁proactive", -12.576746940612793 ], [ "▁agile", -12.576837539672852 ], [ "▁kernel", -12.576844215393066 ], [ "▁nurture", -12.576860427856445 ], [ "▁Patent", -12.576963424682617 ], [ "▁excursi", -12.577189445495605 ], [ "pulsion", -12.577326774597168 ], [ "stellte", -12.577351570129395 ], [ "ständige", -12.577421188354492 ], [ "▁Rebecca", -12.577436447143555 ], [ "▁Securities", -12.577436447143555 ], [ "mètre", -12.577446937561035 ], [ "LOW", -12.577469825744629 ], [ "▁consilier", -12.577537536621094 ], [ "▁Architekt", -12.577733993530273 ], [ "▁china", -12.57777214050293 ], [ "älfte", -12.577778816223145 ], [ "▁Combin", -12.577795028686523 ], [ "480", -12.577999114990234 ], [ "liv", -12.578021049499512 ], [ "▁peur", -12.578067779541016 ], [ "keep", -12.57822322845459 ], [ "▁Verhalten", -12.578324317932129 ], [ "▁peek", -12.578446388244629 ], [ "▁dient", -12.578550338745117 ], [ "▁prevazut", -12.578625679016113 ], [ "Emmanuel", -12.57862663269043 ], [ "▁incidence", -12.57862663269043 ], [ "▁Framework", -12.578715324401855 ], [ "dass", -12.578816413879395 ], [ "artiste", -12.578874588012695 ], [ "▁Accept", -12.578971862792969 ], [ "▁plunge", -12.579073905944824 ], [ "chauff", -12.579118728637695 ], [ "▁guilt", -12.579156875610352 ], [ "▁senator", -12.57945442199707 ], [ "▁disable", -12.579776763916016 ], [ "▁partout", -12.579901695251465 ], [ "JC", -12.580045700073242 ], [ "▁Highly", -12.580150604248047 ], [ "▁beneficii", -12.58021068572998 ], [ "fibro", -12.580347061157227 ], [ "interpreted", -12.580550193786621 ], [ "▁genauso", -12.58056354522705 ], [ "▁basil", -12.580601692199707 ], [ "▁Angst", -12.580697059631348 ], [ "rzte", -12.580933570861816 ], [ "Master", -12.58112907409668 ], [ "▁french", -12.581324577331543 ], [ "▁Duration", -12.581343650817871 ], [ "HM", -12.581402778625488 ], [ "▁Bert", -12.581518173217773 ], [ "▁1963", -12.581534385681152 ], [ "▁warrior", -12.581604957580566 ], [ "2007", -12.581696510314941 ], [ "▁recycle", -12.581722259521484 ], [ "▁fertiliz", -12.581808090209961 ], [ "▁hatch", -12.581809997558594 ], [ "ISH", -12.581811904907227 ], [ "luft", -12.582321166992188 ], [ "▁crying", -12.582452774047852 ], [ "▁activist", -12.5824613571167 ], [ "schränkt", -12.582500457763672 ], [ "▁diff", -12.582500457763672 ], [ "▁Demand", -12.58262825012207 ], [ "▁transported", -12.582669258117676 ], [ "▁Remodel", -12.582686424255371 ], [ "▁Etats", -12.582704544067383 ], [ "ANI", -12.582777976989746 ], [ "▁spéciale", -12.582804679870605 ], [ "▁Konzert", -12.582805633544922 ], [ "▁Bedürfnisse", -12.58281135559082 ], [ "▁overlooked", -12.582864761352539 ], [ "▁cutter", -12.582974433898926 ], [ "klär", -12.58311939239502 ], [ "▁Materialien", -12.583135604858398 ], [ "▁gewisse", -12.583388328552246 ], [ "bull", -12.583499908447266 ], [ "Good", -12.583513259887695 ], [ "Gig", -12.583616256713867 ], [ "Logic", -12.583736419677734 ], [ "▁Schlaf", -12.583970069885254 ], [ "▁Yankee", -12.583996772766113 ], [ "▁Batman", -12.584020614624023 ], [ "▁funcție", -12.584166526794434 ], [ "▁partenariat", -12.584294319152832 ], [ "▁Antrag", -12.584348678588867 ], [ "▁Pill", -12.584519386291504 ], [ "▁tram", -12.584637641906738 ], [ "▁Minor", -12.58465576171875 ], [ "pertaining", -12.584678649902344 ], [ "▁apropiere", -12.584843635559082 ], [ "▁Barack", -12.584965705871582 ], [ "schön", -12.585174560546875 ], [ "▁Sandy", -12.585182189941406 ], [ "kilometre", -12.585192680358887 ], [ "▁diy", -12.585234642028809 ], [ "▁1966", -12.585453987121582 ], [ "gelassen", -12.585485458374023 ], [ "▁Trial", 
-12.585592269897461 ], [ "▁Bauer", -12.585603713989258 ], [ "▁assumption", -12.585648536682129 ], [ "birth", -12.585668563842773 ], [ "rechnen", -12.585861206054688 ], [ "▁meci", -12.585867881774902 ], [ "▁gloss", -12.585906982421875 ], [ "▁sewer", -12.58593463897705 ], [ "▁Stimme", -12.585955619812012 ], [ "▁Fortune", -12.585967063903809 ], [ "▁Lösungen", -12.586007118225098 ], [ "▁impresi", -12.586074829101562 ], [ "schlaf", -12.586089134216309 ], [ "prüfung", -12.586097717285156 ], [ "▁instalat", -12.586198806762695 ], [ "▁picturesque", -12.586233139038086 ], [ "vait", -12.586240768432617 ], [ "8.1", -12.58629035949707 ], [ "▁călători", -12.586392402648926 ], [ "▁dix", -12.586400032043457 ], [ "▁furnished", -12.586411476135254 ], [ "▁dolari", -12.586445808410645 ], [ "▁regener", -12.586562156677246 ], [ "▁astazi", -12.586621284484863 ], [ "▁Sprach", -12.586750030517578 ], [ "delà", -12.586846351623535 ], [ "avec", -12.58694076538086 ], [ "▁Buddhist", -12.586990356445312 ], [ "▁alphabet", -12.586990356445312 ], [ "▁berichtet", -12.587201118469238 ], [ "ideally", -12.587209701538086 ], [ "▁annuel", -12.587421417236328 ], [ "▁laughing", -12.587532997131348 ], [ "▁Zustand", -12.587639808654785 ], [ "cini", -12.587692260742188 ], [ "solid", -12.587724685668945 ], [ "▁Broker", -12.587868690490723 ], [ "▁developmental", -12.5879545211792 ], [ "▁Summary", -12.588191032409668 ], [ "▁Trinity", -12.58819580078125 ], [ "▁sucre", -12.58821964263916 ], [ "▁sandal", -12.588231086730957 ], [ "PEN", -12.588274955749512 ], [ "gewinn", -12.588486671447754 ], [ "olé", -12.588555335998535 ], [ "matric", -12.58865737915039 ], [ "xton", -12.588695526123047 ], [ "werten", -12.588740348815918 ], [ "▁Dust", -12.588765144348145 ], [ "▁Journey", -12.588791847229004 ], [ "▁Rush", -12.588793754577637 ], [ "▁NCAA", -12.588839530944824 ], [ "▁allgemeine", -12.588926315307617 ], [ "▁Universe", -12.589007377624512 ], [ "▁connais", -12.589099884033203 ], [ "▁quantité", -12.58912467956543 ], [ "▁Kab", -12.589150428771973 ], [ "▁purse", -12.589150428771973 ], [ "Health", -12.589210510253906 ], [ "▁apărut", -12.589288711547852 ], [ "▁bypass", -12.589313507080078 ], [ "pronounced", -12.58936595916748 ], [ "▁magnitude", -12.589393615722656 ], [ "▁Walmart", -12.589394569396973 ], [ "ède", -12.589409828186035 ], [ "▁serum", -12.589590072631836 ], [ "▁baseline", -12.589765548706055 ], [ "STER", -12.589932441711426 ], [ "▁ONLY", -12.590052604675293 ], [ "▁individuell", -12.590086936950684 ], [ "▁Ghi", -12.590139389038086 ], [ "▁Ruby", -12.59020709991455 ], [ "▁Chal", -12.590241432189941 ], [ "▁Vier", -12.590261459350586 ], [ "5.0", -12.5903902053833 ], [ "▁fog", -12.590519905090332 ], [ "esel", -12.590557098388672 ], [ "▁Python", -12.590598106384277 ], [ "▁urmează", -12.590608596801758 ], [ "▁trustworthy", -12.590639114379883 ], [ "hört", -12.590729713439941 ], [ "▁tâche", -12.59078311920166 ], [ "Patri", -12.590799331665039 ], [ "▁grind", -12.590928077697754 ], [ "▁Raven", -12.590934753417969 ], [ "▁poursuiv", -12.590951919555664 ], [ "▁simpli", -12.591140747070312 ], [ "▁echo", -12.591165542602539 ], [ "▁Attention", -12.591313362121582 ], [ "Against", -12.591402053833008 ], [ "GET", -12.59148120880127 ], [ "▁turistic", -12.591535568237305 ], [ "▁tenure", -12.59158992767334 ], [ "▁alimentaire", -12.591651916503906 ], [ "Who", -12.59172248840332 ], [ "▁ändern", -12.591729164123535 ], [ "▁rebound", -12.591778755187988 ], [ "grenze", -12.591849327087402 ], [ "▁Fame", -12.592093467712402 ], [ "▁Kick", -12.592215538024902 ], [ 
"▁Detail", -12.59228801727295 ], [ "▁Push", -12.592308044433594 ], [ "production", -12.592430114746094 ], [ "▁Candidates", -12.59244441986084 ], [ "▁reușit", -12.592484474182129 ], [ "istischen", -12.592525482177734 ], [ "lassung", -12.592649459838867 ], [ "▁Hann", -12.592713356018066 ], [ "espère", -12.592965126037598 ], [ "▁vergessen", -12.593008041381836 ], [ "▁smiling", -12.593010902404785 ], [ "▁devotion", -12.593016624450684 ], [ "▁pastry", -12.593071937561035 ], [ "Add", -12.593390464782715 ], [ "▁authorization", -12.593494415283203 ], [ "▁Suisse", -12.593568801879883 ], [ "▁Berkeley", -12.593611717224121 ], [ "▁Guild", -12.593660354614258 ], [ "▁choir", -12.593748092651367 ], [ "learning", -12.593802452087402 ], [ "▁Tanz", -12.593894004821777 ], [ "mardi", -12.594076156616211 ], [ "▁rezultatele", -12.594191551208496 ], [ "▁earrings", -12.594218254089355 ], [ "▁turbine", -12.594223976135254 ], [ "▁jeudi", -12.594284057617188 ], [ "terapie", -12.594576835632324 ], [ "regain", -12.59461498260498 ], [ "SET", -12.594643592834473 ], [ "▁Hände", -12.594681739807129 ], [ "▁Globe", -12.594683647155762 ], [ "frag", -12.594775199890137 ], [ "▁Treasury", -12.594820976257324 ], [ "▁hazardous", -12.594820976257324 ], [ "▁Fahrt", -12.594928741455078 ], [ "▁fulfilled", -12.594966888427734 ], [ "▁manga", -12.594987869262695 ], [ "▁composé", -12.595067977905273 ], [ "▁ABS", -12.595132827758789 ], [ "▁preced", -12.595197677612305 ], [ "▁beauté", -12.595233917236328 ], [ "▁interessant", -12.59526252746582 ], [ "▁lieber", -12.595324516296387 ], [ "▁Kö", -12.595378875732422 ], [ "EMS", -12.595410346984863 ], [ "FER", -12.595413208007812 ], [ "▁eure", -12.595427513122559 ], [ "▁plumber", -12.595427513122559 ], [ "Love", -12.595463752746582 ], [ "▁Marcus", -12.595635414123535 ], [ "▁registry", -12.595637321472168 ], [ "▁uncle", -12.595696449279785 ], [ "▁neuf", -12.595728874206543 ], [ "▁Fläche", -12.59575080871582 ], [ "▁restaur", -12.595815658569336 ], [ "▁noticeable", -12.595833778381348 ], [ "▁riches", -12.595871925354004 ], [ "occupy", -12.596031188964844 ], [ "▁hurricane", -12.596031188964844 ], [ "▁gespeichert", -12.596033096313477 ], [ "▁Bordeaux", -12.596039772033691 ], [ "▁Maj", -12.59637451171875 ], [ "Applied", -12.596439361572266 ], [ "▁compter", -12.596575736999512 ], [ "impact", -12.59663200378418 ], [ "▁Improve", -12.596758842468262 ], [ "▁Calif", -12.596832275390625 ], [ "▁desfășur", -12.596939086914062 ], [ "▁packaged", -12.597001075744629 ], [ "180", -12.59703540802002 ], [ "devenu", -12.597042083740234 ], [ "▁Battery", -12.597243309020996 ], [ "▁objection", -12.597254753112793 ], [ "▁anual", -12.597305297851562 ], [ "▁Landscape", -12.59731674194336 ], [ "IQ", -12.597403526306152 ], [ "grès", -12.597586631774902 ], [ "▁witnesses", -12.597750663757324 ], [ "enţial", -12.597764015197754 ], [ "▁plateau", -12.597779273986816 ], [ "▁bilete", -12.59783935546875 ], [ "▁Bronze", -12.59786605834961 ], [ "▁Kiss", -12.597946166992188 ], [ "▁Serge", -12.598093032836914 ], [ "atomic", -12.598145484924316 ], [ "▁renovated", -12.59817886352539 ], [ "player", -12.598212242126465 ], [ "▁dirig", -12.598291397094727 ], [ "▁Îm", -12.598296165466309 ], [ "▁plimb", -12.59843635559082 ], [ "▁ambassador", -12.598455429077148 ], [ "▁apropiat", -12.598455429077148 ], [ "▁adaug", -12.598602294921875 ], [ "ogenic", -12.59872055053711 ], [ "kämpfe", -12.598779678344727 ], [ "▁Hillary", -12.598907470703125 ], [ "yak", -12.598942756652832 ], [ "General", -12.59925365447998 ], [ "▁Zugang", -12.599400520324707 ], [ 
"▁fertil", -12.599457740783691 ], [ "incat", -12.599536895751953 ], [ "assessing", -12.599587440490723 ], [ "▁Cincinnati", -12.59967041015625 ], [ "▁convincing", -12.599685668945312 ], [ "sadly", -12.59974479675293 ], [ "kunde", -12.599801063537598 ], [ "ambul", -12.599913597106934 ], [ "▁familii", -12.599974632263184 ], [ "juri", -12.60007095336914 ], [ "ionen", -12.600102424621582 ], [ "▁Wirtschaft", -12.600130081176758 ], [ "contract", -12.600135803222656 ], [ "punem", -12.600151062011719 ], [ "handlung", -12.600394248962402 ], [ "▁fournir", -12.600455284118652 ], [ "▁Ambi", -12.600663185119629 ], [ "▁Isaac", -12.600663185119629 ], [ "▁praying", -12.6007719039917 ], [ "▁Italien", -12.600848197937012 ], [ "233", -12.600850105285645 ], [ "spawn", -12.600913047790527 ], [ "▁legii", -12.60092544555664 ], [ "▁zuvor", -12.601018905639648 ], [ "▁comune", -12.601030349731445 ], [ "official", -12.601165771484375 ], [ "144", -12.601290702819824 ], [ "izeaza", -12.601329803466797 ], [ "▁Keller", -12.601372718811035 ], [ "ORE", -12.601378440856934 ], [ "122", -12.601485252380371 ], [ "incurred", -12.60150146484375 ], [ "CHA", -12.601579666137695 ], [ "▁Herzen", -12.601590156555176 ], [ "▁reasoning", -12.6016263961792 ], [ "affaire", -12.601849555969238 ], [ "ooth", -12.601890563964844 ], [ "155", -12.601998329162598 ], [ "▁invented", -12.602113723754883 ], [ "▁Comun", -12.602140426635742 ], [ "zähl", -12.602179527282715 ], [ "geliefert", -12.602212905883789 ], [ "explorer", -12.602213859558105 ], [ "nect", -12.602326393127441 ], [ "▁mercredi", -12.602408409118652 ], [ "▁volonté", -12.602408409118652 ], [ "easy", -12.602453231811523 ], [ "▁feat", -12.602490425109863 ], [ "rented", -12.602580070495605 ], [ "▁converter", -12.602592468261719 ], [ "Verhältnis", -12.602713584899902 ], [ "▁Iceland", -12.602792739868164 ], [ "▁pretul", -12.602933883666992 ], [ "▁Vorstellung", -12.602960586547852 ], [ "▁hydrogen", -12.603096008300781 ], [ "▁pouvai", -12.603097915649414 ], [ "▁dawn", -12.603153228759766 ], [ "▁Georg", -12.603269577026367 ], [ "▁cautious", -12.603367805480957 ], [ "▁Pattern", -12.603464126586914 ], [ "▁Ox", -12.603602409362793 ], [ "▁decizie", -12.603676795959473 ], [ "REC", -12.603889465332031 ], [ "▁Mortgage", -12.60393238067627 ], [ "attributed", -12.603973388671875 ], [ "floor", -12.603992462158203 ], [ "▁Wichtig", -12.604207992553711 ], [ "enseignant", -12.604265213012695 ], [ "▁civilization", -12.604302406311035 ], [ "▁dispozitie", -12.60450553894043 ], [ "▁geographic", -12.604543685913086 ], [ "▁Kun", -12.604607582092285 ], [ "LIN", -12.604679107666016 ], [ "▁auzit", -12.604707717895508 ], [ "except", -12.604761123657227 ], [ "▁superbe", -12.604904174804688 ], [ "▁installé", -12.605000495910645 ], [ "▁Peninsula", -12.605154037475586 ], [ "▁norme", -12.605164527893066 ], [ "elul", -12.60517406463623 ], [ "▁Experten", -12.605256080627441 ], [ "expression", -12.605295181274414 ], [ "Christ", -12.605320930480957 ], [ "▁Fuel", -12.605369567871094 ], [ "▁muffin", -12.605485916137695 ], [ "▁lecteur", -12.605521202087402 ], [ "▁gifted", -12.605589866638184 ], [ "▁Japon", -12.605602264404297 ], [ "▁SSD", -12.605644226074219 ], [ "▁Calgary", -12.605765342712402 ], [ "▁hooked", -12.605876922607422 ], [ "▁Joan", -12.605896949768066 ], [ "▁tangible", -12.606083869934082 ], [ "FW", -12.606225967407227 ], [ "olli", -12.6062593460083 ], [ "▁Platinum", -12.606376647949219 ], [ "▁miniature", -12.606392860412598 ], [ "▁lump", -12.606608390808105 ], [ "ologische", -12.60689926147461 ], [ "▁Istanbul", 
-12.606987953186035 ], [ "▁Compar", -12.607060432434082 ], [ "tropic", -12.607256889343262 ], [ "KING", -12.607279777526855 ], [ "Präsident", -12.607297897338867 ], [ "▁fotografii", -12.607303619384766 ], [ "hoped", -12.607451438903809 ], [ "▁pâte", -12.607601165771484 ], [ "▁mercy", -12.60760498046875 ], [ "▁quiz", -12.607619285583496 ], [ "demonstrating", -12.607678413391113 ], [ "▁douce", -12.607832908630371 ], [ "▁Vest", -12.607841491699219 ], [ "▁Harvey", -12.6082181930542 ], [ "▁breit", -12.608227729797363 ], [ "▁Bereits", -12.608291625976562 ], [ "▁breakthrough", -12.608316421508789 ], [ "▁masterpiece", -12.608320236206055 ], [ "▁Chester", -12.60838794708252 ], [ "▁indiqué", -12.608451843261719 ], [ "hook", -12.60857105255127 ], [ "statutory", -12.608596801757812 ], [ "▁Direkt", -12.608617782592773 ], [ "▁specs", -12.608708381652832 ], [ "Drive", -12.608725547790527 ], [ "▁survivors", -12.608826637268066 ], [ "▁jackpot", -12.608840942382812 ], [ "▁garder", -12.608872413635254 ], [ "▁Geburtstag", -12.60887336730957 ], [ "145", -12.608963966369629 ], [ "▁Clay", -12.609028816223145 ], [ "▁WHO", -12.60906982421875 ], [ "▁Ellen", -12.609393119812012 ], [ "▁bonheur", -12.609440803527832 ], [ "▁hazards", -12.609440803527832 ], [ "▁Kaiser", -12.609488487243652 ], [ "▁tightly", -12.609506607055664 ], [ "Universitatea", -12.609529495239258 ], [ "▁rinse", -12.609533309936523 ], [ "▁passant", -12.609640121459961 ], [ "▁sânge", -12.609832763671875 ], [ "▁peuple", -12.60983657836914 ], [ "jungen", -12.609975814819336 ], [ "▁inappropriate", -12.610054969787598 ], [ "▁mitigate", -12.610066413879395 ], [ "MID", -12.610221862792969 ], [ "▁telecom", -12.610297203063965 ], [ "▁plaj", -12.610316276550293 ], [ "▁presupune", -12.610361099243164 ], [ "acco", -12.61038875579834 ], [ "expressing", -12.610654830932617 ], [ "▁Symphony", -12.61066722869873 ], [ "temperatur", -12.610710144042969 ], [ "▁activităţi", -12.610800743103027 ], [ "▁amended", -12.610847473144531 ], [ "▁rehab", -12.610909461975098 ], [ "▁sportiv", -12.611004829406738 ], [ "hotel", -12.611031532287598 ], [ "branche", -12.61103630065918 ], [ "▁Noch", -12.611079216003418 ], [ "▁1961", -12.611238479614258 ], [ "release", -12.611359596252441 ], [ "blaze", -12.611381530761719 ], [ "Adv", -12.61139965057373 ], [ "Line", -12.611671447753906 ], [ "▁financiare", -12.61184310913086 ], [ "▁chauffage", -12.611919403076172 ], [ "мо", -12.61192512512207 ], [ "schuhe", -12.612035751342773 ], [ "blé", -12.612040519714355 ], [ "▁Echo", -12.612468719482422 ], [ "▁remarks", -12.61253547668457 ], [ "scriu", -12.612629890441895 ], [ "Vir", -12.612701416015625 ], [ "War", -12.61271858215332 ], [ "atifs", -12.613006591796875 ], [ "RING", -12.613082885742188 ], [ "▁Instruction", -12.613150596618652 ], [ "▁verlassen", -12.613155364990234 ], [ "▁ergänz", -12.613234519958496 ], [ "▁Emil", -12.613248825073242 ], [ "▁empire", -12.613263130187988 ], [ "▁Einkauf", -12.613306999206543 ], [ "utigen", -12.613329887390137 ], [ "▁audition", -12.613390922546387 ], [ "travelled", -12.61347484588623 ], [ "ло", -12.613579750061035 ], [ "▁infinite", -12.613720893859863 ], [ "▁Lieblings", -12.613749504089355 ], [ "▁vân", -12.613754272460938 ], [ "▁spinning", -12.613778114318848 ], [ "converting", -12.614031791687012 ], [ "▁uncertain", -12.61415958404541 ], [ "restul", -12.614168167114258 ], [ "▁colourful", -12.61420726776123 ], [ "▁accountant", -12.614338874816895 ], [ "bourg", -12.614532470703125 ], [ "▁structuri", -12.614538192749023 ], [ "▁Booking", -12.61465835571289 ], [ 
"intéresse", -12.614683151245117 ], [ "▁coordinated", -12.614753723144531 ], [ "▁precaution", -12.61497688293457 ], [ "▁Cheese", -12.615015983581543 ], [ "▁surfing", -12.615192413330078 ], [ "▁souffr", -12.61524486541748 ], [ "▁Menu", -12.615447998046875 ], [ "▁arthritis", -12.615593910217285 ], [ "▁headphones", -12.615601539611816 ], [ "▁upgrading", -12.615602493286133 ], [ "▁apparel", -12.615653038024902 ], [ "▁Haushalt", -12.61572551727295 ], [ "▁Personally", -12.615815162658691 ], [ "▁insane", -12.615950584411621 ], [ "▁fonduri", -12.616083145141602 ], [ "▁entier", -12.616239547729492 ], [ "▁Herbst", -12.616264343261719 ], [ "▁cyclist", -12.616331100463867 ], [ "▁filmmaker", -12.616741180419922 ], [ "▁Portuguese", -12.616829872131348 ], [ "▁nominee", -12.616851806640625 ], [ "▁Yang", -12.616857528686523 ], [ "▁slate", -12.616943359375 ], [ "▁entièrement", -12.616974830627441 ], [ "▁Umgang", -12.617049217224121 ], [ "shifted", -12.617135047912598 ], [ "▁défaut", -12.617138862609863 ], [ "heiz", -12.617246627807617 ], [ "▁Seal", -12.617379188537598 ], [ "▁servicing", -12.617451667785645 ], [ "marketing", -12.617562294006348 ], [ "▁demandé", -12.617755889892578 ], [ "TING", -12.617841720581055 ], [ "▁modifier", -12.617907524108887 ], [ "lysis", -12.617966651916504 ], [ "▁suplimentare", -12.618117332458496 ], [ "OTHER", -12.618359565734863 ], [ "Graph", -12.618379592895508 ], [ "▁coincide", -12.618448257446289 ], [ "governed", -12.618598937988281 ], [ "▁locking", -12.618638038635254 ], [ "▁Properties", -12.618685722351074 ], [ "▁Panama", -12.61876392364502 ], [ "▁Coupe", -12.618846893310547 ], [ "songwriter", -12.618978500366211 ], [ "exhibited", -12.618988990783691 ], [ "▁semnificativ", -12.618995666503906 ], [ "▁purchaser", -12.619004249572754 ], [ "▁puff", -12.619097709655762 ], [ "Back", -12.619105339050293 ], [ "fragt", -12.61919116973877 ], [ "▁deputy", -12.619362831115723 ], [ "▁revien", -12.619556427001953 ], [ "▁Christine", -12.619558334350586 ], [ "▁Cities", -12.619573593139648 ], [ "▁Charakter", -12.61961555480957 ], [ "atteindre", -12.619625091552734 ], [ "▁fou", -12.619635581970215 ], [ "▁obligatoire", -12.619643211364746 ], [ "INA", -12.619791030883789 ], [ "etc", -12.6198148727417 ], [ "▁newborn", -12.620091438293457 ], [ "▁explicitly", -12.620116233825684 ], [ "simplest", -12.620203018188477 ], [ "▁plateforme", -12.62023639678955 ], [ "ordinate", -12.620291709899902 ], [ "displaying", -12.620346069335938 ], [ "▁messy", -12.620464324951172 ], [ "gespielt", -12.620466232299805 ], [ "▁electron", -12.62061882019043 ], [ "▁Dreh", -12.620796203613281 ], [ "▁ambient", -12.620976448059082 ], [ "340", -12.620979309082031 ], [ "▁directive", -12.62109375 ], [ "▁Vall", -12.621152877807617 ], [ "ookie", -12.621206283569336 ], [ "▁wasted", -12.621304512023926 ], [ "CIS", -12.621367454528809 ], [ "lude", -12.621378898620605 ], [ "rach", -12.621472358703613 ], [ "▁gasest", -12.62150764465332 ], [ "▁miros", -12.62150764465332 ], [ "transforming", -12.621536254882812 ], [ "▁Milwaukee", -12.621787071228027 ], [ "▁uncommon", -12.621789932250977 ], [ "▁tableau", -12.621841430664062 ], [ "geräte", -12.621952056884766 ], [ "ophil", -12.622139930725098 ], [ "▁Jeep", -12.62220287322998 ], [ "▁wreck", -12.622422218322754 ], [ "LAND", -12.622434616088867 ], [ "attach", -12.622566223144531 ], [ "▁Panther", -12.622634887695312 ], [ "9:30", -12.622777938842773 ], [ "▁induce", -12.622974395751953 ], [ "▁privest", -12.623006820678711 ], [ "Ident", -12.623047828674316 ], [ "▁illnesses", -12.623076438903809 
], [ "▁inhabitants", -12.623138427734375 ], [ "▁fehlen", -12.623357772827148 ], [ "obtenu", -12.623391151428223 ], [ "▁gegründet", -12.623655319213867 ], [ "ARA", -12.623711585998535 ], [ "3-2", -12.623835563659668 ], [ "▁milliards", -12.623968124389648 ], [ "▁Bü", -12.624001502990723 ], [ "▁angegeben", -12.624102592468262 ], [ "TUR", -12.624143600463867 ], [ "▁arab", -12.624166488647461 ], [ "▁Scientist", -12.624275207519531 ], [ "▁minut", -12.624394416809082 ], [ "▁beast", -12.624481201171875 ], [ "▁accidentally", -12.624573707580566 ], [ "WN", -12.624579429626465 ], [ "▁Ralph", -12.624588966369629 ], [ "hängt", -12.62462329864502 ], [ "▁Erik", -12.624639511108398 ], [ "▁différent", -12.624711990356445 ], [ "▁conformitate", -12.624842643737793 ], [ "thriving", -12.624900817871094 ], [ "▁Piece", -12.625123023986816 ], [ "plasm", -12.625152587890625 ], [ "▁erwarten", -12.62520980834961 ], [ "owski", -12.62523365020752 ], [ "prayed", -12.625293731689453 ], [ "three", -12.625542640686035 ], [ "▁soundtrack", -12.625651359558105 ], [ "guru", -12.625709533691406 ], [ "▁cracked", -12.625710487365723 ], [ "▁adh", -12.625823020935059 ], [ "▁maître", -12.625834465026855 ], [ "▁Oberfläche", -12.62585735321045 ], [ "▁crab", -12.625886917114258 ], [ "▁Foster", -12.625944137573242 ], [ "▁gemütlich", -12.626145362854004 ], [ "SIC", -12.626226425170898 ], [ "ième", -12.626298904418945 ], [ "▁Few", -12.626330375671387 ], [ "gérer", -12.626360893249512 ], [ "2006", -12.626456260681152 ], [ "cool", -12.626498222351074 ], [ "▁dispune", -12.626523971557617 ], [ "recevoir", -12.626577377319336 ], [ "▁Bak", -12.626585960388184 ], [ "▁steer", -12.62659740447998 ], [ "ICS", -12.626733779907227 ], [ "▁Brett", -12.626733779907227 ], [ "▁downside", -12.626751899719238 ], [ "▁residency", -12.62678050994873 ], [ "important", -12.626991271972656 ], [ "ubb", -12.627073287963867 ], [ "mony", -12.627259254455566 ], [ "▁leasing", -12.627341270446777 ], [ "▁Gir", -12.62735366821289 ], [ "▁Biology", -12.627364158630371 ], [ "▁Colin", -12.627463340759277 ], [ "▁complicat", -12.627775192260742 ], [ "▁regroup", -12.627899169921875 ], [ "SPA", -12.627950668334961 ], [ "▁Veranstaltungen", -12.627986907958984 ], [ "convicted", -12.628019332885742 ], [ "▁Wonderful", -12.628636360168457 ], [ "züge", -12.628799438476562 ], [ "yton", -12.628813743591309 ], [ "EMENT", -12.628887176513672 ], [ "▁bent", -12.62893009185791 ], [ "heben", -12.629231452941895 ], [ "▁Sustainable", -12.62926959991455 ], [ "▁Newcastle", -12.629276275634766 ], [ "mother", -12.629507064819336 ], [ "▁eighth", -12.629572868347168 ], [ "▁atmosfer", -12.629582405090332 ], [ "expériment", -12.629584312438965 ], [ "▁Interest", -12.629608154296875 ], [ "▁successes", -12.62964153289795 ], [ "▁preschool", -12.629802703857422 ], [ "▁Funeral", -12.629900932312012 ], [ "blast", -12.630083084106445 ], [ "▁dimensiuni", -12.630125999450684 ], [ "▁Dow", -12.630167007446289 ], [ "▁pulp", -12.63022518157959 ], [ "▁Heather", -12.630356788635254 ], [ "▁erstellen", -12.63044261932373 ], [ "locating", -12.630470275878906 ], [ "direct", -12.630475997924805 ], [ "▁tractor", -12.630494117736816 ], [ "growing", -12.630576133728027 ], [ "▁inventor", -12.630587577819824 ], [ "ASA", -12.63060188293457 ], [ "insta", -12.630732536315918 ], [ "yana", -12.63082504272461 ], [ "▁squash", -12.630839347839355 ], [ "▁Basketball", -12.630853652954102 ], [ "AMA", -12.631041526794434 ], [ "insel", -12.631093978881836 ], [ "▁Fisch", -12.631138801574707 ], [ "▁metaphor", -12.631221771240234 ], [ "TES", 
-12.631304740905762 ], [ "▁conduce", -12.631308555603027 ], [ "stehende", -12.631370544433594 ], [ "▁FAQ", -12.631475448608398 ], [ "▁bezeichnet", -12.631658554077148 ], [ "wendung", -12.631706237792969 ], [ "▁Commonwealth", -12.631776809692383 ], [ "▁bait", -12.631793975830078 ], [ "▁Umsetzung", -12.631834030151367 ], [ "▁Equi", -12.632063865661621 ], [ "▁validity", -12.632109642028809 ], [ "Off", -12.63222599029541 ], [ "▁produsul", -12.632314682006836 ], [ "▁sensory", -12.632363319396973 ], [ "▁Imperial", -12.632501602172852 ], [ "▁Dick", -12.632542610168457 ], [ "kampf", -12.632596969604492 ], [ "▁Arzt", -12.63267993927002 ], [ "▁Reason", -12.63267993927002 ], [ "ITS", -12.63270092010498 ], [ "URL", -12.632720947265625 ], [ "demonstrates", -12.632725715637207 ], [ "▁dépend", -12.632753372192383 ], [ "NAS", -12.632970809936523 ], [ "▁funcți", -12.633031845092773 ], [ "▁vulnerability", -12.633085250854492 ], [ "2.7", -12.633143424987793 ], [ "layered", -12.633152961730957 ], [ "escence", -12.633206367492676 ], [ "▁République", -12.633346557617188 ], [ "▁Lust", -12.633377075195312 ], [ "▁sute", -12.633381843566895 ], [ "▁autonomous", -12.633661270141602 ], [ "Biserica", -12.633662223815918 ], [ "▁Chuck", -12.633749961853027 ], [ "▁protéger", -12.6339750289917 ], [ "rrell", -12.634061813354492 ], [ "▁Schaden", -12.634062767028809 ], [ "prennent", -12.634100914001465 ], [ "maß", -12.6343412399292 ], [ "OV", -12.634453773498535 ], [ "▁Wake", -12.63450813293457 ], [ "produire", -12.634635925292969 ], [ "▁Elder", -12.634749412536621 ], [ "Max", -12.634839057922363 ], [ "▁Chemistry", -12.634918212890625 ], [ "▁gourmet", -12.634918212890625 ], [ "erri", -12.634967803955078 ], [ "ени", -12.635085105895996 ], [ "▁Gru", -12.635147094726562 ], [ "▁vorbit", -12.635408401489258 ], [ "▁precede", -12.635455131530762 ], [ "▁randomly", -12.635489463806152 ], [ "▁efecte", -12.63563060760498 ], [ "▁calatori", -12.635668754577637 ], [ "▁Poor", -12.635765075683594 ], [ "List", -12.635781288146973 ], [ "▁regula", -12.635964393615723 ], [ "▁organisé", -12.636028289794922 ], [ "Div", -12.636076927185059 ], [ "▁volunteering", -12.636423110961914 ], [ "▁horr", -12.636449813842773 ], [ "9.99", -12.636487007141113 ], [ "▁UPS", -12.636513710021973 ], [ "▁englez", -12.63652229309082 ], [ "▁Eden", -12.636523246765137 ], [ "GG", -12.63659954071045 ], [ "▁typing", -12.63664722442627 ], [ "Likewise", -12.636700630187988 ], [ "▁stabilize", -12.636737823486328 ], [ "physio", -12.636747360229492 ], [ "ми", -12.636785507202148 ], [ "▁protagonist", -12.636808395385742 ], [ "▁velvet", -12.636812210083008 ], [ "schrank", -12.636861801147461 ], [ "▁Allah", -12.63693618774414 ], [ "▁forefront", -12.636968612670898 ], [ "▁salaries", -12.637001037597656 ], [ "▁prediction", -12.637041091918945 ], [ "▁Advent", -12.637182235717773 ], [ "politik", -12.637280464172363 ], [ "▁Heimat", -12.637350082397461 ], [ "ducted", -12.637380599975586 ], [ "ASH", -12.637386322021484 ], [ "▁Mold", -12.637773513793945 ], [ "▁publi", -12.63784122467041 ], [ "▁Vil", -12.637892723083496 ], [ "▁stu", -12.637925148010254 ], [ "INTE", -12.638032913208008 ], [ "▁fave", -12.638151168823242 ], [ "▁grounded", -12.638175010681152 ], [ "▁Anything", -12.638184547424316 ], [ "vik", -12.638481140136719 ], [ "Bank", -12.63853645324707 ], [ "deserved", -12.638550758361816 ], [ "machen", -12.63874626159668 ], [ "▁rugged", -12.638751029968262 ], [ "▁Nest", -12.638901710510254 ], [ "▁profund", -12.639043807983398 ], [ "▁quantum", -12.639067649841309 ], [ "▁funcționa", 
-12.639118194580078 ], [ "klu", -12.639158248901367 ], [ "▁consulter", -12.63917350769043 ], [ "MED", -12.639286994934082 ], [ "▁câştig", -12.639334678649902 ], [ "▁săptămâni", -12.639334678649902 ], [ "questioned", -12.639517784118652 ], [ "▁Trop", -12.639530181884766 ], [ "▁convo", -12.639533042907715 ], [ "▁sparkling", -12.639533996582031 ], [ "▁specialise", -12.639566421508789 ], [ "▁pancake", -12.639726638793945 ], [ "habitude", -12.639727592468262 ], [ "phal", -12.640009880065918 ], [ "▁Roche", -12.640158653259277 ], [ "▁personalities", -12.640250205993652 ], [ "▁Venice", -12.640308380126953 ], [ "▁comerciale", -12.640379905700684 ], [ "▁wounded", -12.64075756072998 ], [ "▁oraş", -12.640864372253418 ], [ "▁Pepper", -12.641044616699219 ], [ "▁Tourist", -12.641094207763672 ], [ "▁Mull", -12.64116382598877 ], [ "▁dignity", -12.641234397888184 ], [ "▁Fixed", -12.641291618347168 ], [ "çant", -12.64130687713623 ], [ "▁spectator", -12.641402244567871 ], [ "▁somn", -12.641685485839844 ], [ "▁ständig", -12.641820907592773 ], [ "▁resilience", -12.641866683959961 ], [ "▁Malta", -12.642251014709473 ], [ "▁problemele", -12.642253875732422 ], [ "▁Martha", -12.642254829406738 ], [ "▁extern", -12.642267227172852 ], [ "embre", -12.642379760742188 ], [ "▁médical", -12.642526626586914 ], [ "fordern", -12.64256477355957 ], [ "nji", -12.642592430114746 ], [ "▁aboard", -12.642740249633789 ], [ "▁sidewalk", -12.642759323120117 ], [ "WIN", -12.642775535583496 ], [ "▁Bobby", -12.642842292785645 ], [ "▁umfangreiche", -12.642876625061035 ], [ "leid", -12.64292049407959 ], [ "▁compens", -12.642967224121094 ], [ "▁juge", -12.64299488067627 ], [ "gerufen", -12.64311408996582 ], [ "▁médicament", -12.643135070800781 ], [ "▁1918", -12.643155097961426 ], [ "▁blanche", -12.643163681030273 ], [ "▁pleasing", -12.643220901489258 ], [ "▁propria", -12.643471717834473 ], [ "ergebnisse", -12.643503189086914 ], [ "▁retrouv", -12.643571853637695 ], [ "urteil", -12.643592834472656 ], [ "▁Draft", -12.64361572265625 ], [ "▁concluzi", -12.643671035766602 ], [ "centralized", -12.643789291381836 ], [ "▁Hannah", -12.64382266998291 ], [ "grija", -12.64392375946045 ], [ "▁Exercise", -12.643972396850586 ], [ "RAL", -12.644001960754395 ], [ "creme", -12.64408016204834 ], [ "High", -12.644126892089844 ], [ "clude", -12.644131660461426 ], [ "Considering", -12.644208908081055 ], [ "▁Guarantee", -12.644404411315918 ], [ "▁cuptor", -12.644436836242676 ], [ "ivität", -12.64468002319336 ], [ "▁Southwest", -12.644882202148438 ], [ "▁vivant", -12.644890785217285 ], [ "Your", -12.64498519897461 ], [ "▁Stunde", -12.645003318786621 ], [ "▁Ethernet", -12.645040512084961 ], [ "angebote", -12.645078659057617 ], [ "▁Sage", -12.645271301269531 ], [ "▁Boeing", -12.645295143127441 ], [ "▁$300", -12.645381927490234 ], [ "2-4", -12.64546012878418 ], [ "▁nécessit", -12.645516395568848 ], [ "▁ferment", -12.645599365234375 ], [ "▁Anmeldung", -12.64567756652832 ], [ "▁exhausted", -12.645758628845215 ], [ "▁Schloss", -12.645772933959961 ], [ "▁Replacement", -12.645859718322754 ], [ "▁Aussi", -12.645933151245117 ], [ "jection", -12.646127700805664 ], [ "978", -12.64615535736084 ], [ "▁siège", -12.646258354187012 ], [ "crest", -12.646310806274414 ], [ "▁jumatate", -12.646312713623047 ], [ "effizient", -12.646317481994629 ], [ "▁colaborare", -12.6464262008667 ], [ "HQ", -12.646615028381348 ], [ "130", -12.646695137023926 ], [ "culaire", -12.646907806396484 ], [ "▁Jamaica", -12.646952629089355 ], [ "▁cardboard", -12.64731216430664 ], [ "▁technische", 
-12.64731502532959 ], [ "▁cereri", -12.647507667541504 ], [ "▁contradict", -12.647570610046387 ], [ "▁irrigation", -12.647586822509766 ], [ "Nume", -12.64765739440918 ], [ "▁Bier", -12.647714614868164 ], [ "▁livrare", -12.647903442382812 ], [ "▁reservoir", -12.647906303405762 ], [ "vâr", -12.648130416870117 ], [ "▁galben", -12.648213386535645 ], [ "▁Geneva", -12.648303985595703 ], [ "▁lightning", -12.648418426513672 ], [ "wished", -12.64842414855957 ], [ "▁Blind", -12.648481369018555 ], [ "Interested", -12.648499488830566 ], [ "▁Primări", -12.648627281188965 ], [ "anthropo", -12.648954391479492 ], [ "▁Transaction", -12.648961067199707 ], [ "▁marcat", -12.648971557617188 ], [ "▁gelegen", -12.649077415466309 ], [ "▁contemporain", -12.649182319641113 ], [ "▁politică", -12.649182319641113 ], [ "▁1948", -12.64928150177002 ], [ "▁Mik", -12.649287223815918 ], [ "▁preţ", -12.649310111999512 ], [ "moor", -12.649312973022461 ], [ "ANN", -12.649432182312012 ], [ "▁constructive", -12.649454116821289 ], [ "konzept", -12.649502754211426 ], [ "▁entendu", -12.649511337280273 ], [ "▁Genesis", -12.649541854858398 ], [ "arzt", -12.649581909179688 ], [ "▁Allgemein", -12.64970874786377 ], [ "▁Derby", -12.649725914001465 ], [ "Class", -12.649762153625488 ], [ "▁$12", -12.649770736694336 ], [ "▁Tube", -12.6498441696167 ], [ "▁Contribu", -12.649847030639648 ], [ "▁HAVE", -12.649860382080078 ], [ "▁oxide", -12.64986515045166 ], [ "▁producator", -12.649941444396973 ], [ "▁Bench", -12.650132179260254 ], [ "▁comprehend", -12.650139808654785 ], [ "▁Damen", -12.650494575500488 ], [ "▁Garant", -12.65056037902832 ], [ "▁disappointing", -12.650614738464355 ], [ "▁réalisée", -12.650693893432617 ], [ "▁comportement", -12.65072250366211 ], [ "▁clash", -12.650753021240234 ], [ "▁curry", -12.65076732635498 ], [ "▁Lebanon", -12.65078067779541 ], [ "▁Romaniei", -12.650784492492676 ], [ "▁reprise", -12.650840759277344 ], [ "▁perceive", -12.65095329284668 ], [ "▁weaknesses", -12.65101146697998 ], [ "▁aminti", -12.651057243347168 ], [ "▁Concern", -12.651103973388672 ], [ "shadow", -12.651310920715332 ], [ "▁basin", -12.651311874389648 ], [ "moral", -12.652063369750977 ], [ "▁Hughes", -12.652101516723633 ], [ "Psych", -12.652266502380371 ], [ "▁Lieferung", -12.65227222442627 ], [ "▁serrurier", -12.652379035949707 ], [ "ussi", -12.652386665344238 ], [ "▁timpului", -12.6524658203125 ], [ "üm", -12.652629852294922 ], [ "▁Vladimir", -12.652701377868652 ], [ "▁Jag", -12.65279483795166 ], [ "▁verific", -12.652849197387695 ], [ "▁Pru", -12.652894020080566 ], [ "▁Laut", -12.653285026550293 ], [ "ITA", -12.653287887573242 ], [ "usually", -12.653294563293457 ], [ "▁carrière", -12.65341854095459 ], [ "▁extracted", -12.653663635253906 ], [ "kultur", -12.653679847717285 ], [ "öpfe", -12.653932571411133 ], [ "▁rejection", -12.654016494750977 ], [ "▁Hydr", -12.654062271118164 ], [ "▁informaţii", -12.654098510742188 ], [ "▁tolerate", -12.654122352600098 ], [ "▁cinéma", -12.654302597045898 ], [ "traumatic", -12.654305458068848 ], [ "produkt", -12.654450416564941 ], [ "▁Contest", -12.654560089111328 ], [ "lotte", -12.654570579528809 ], [ "▁Pension", -12.65461254119873 ], [ "▁Advertising", -12.654623985290527 ], [ "▁payout", -12.654772758483887 ], [ "▁Amanda", -12.65481185913086 ], [ "Elect", -12.65485668182373 ], [ "▁interiorul", -12.654996871948242 ], [ "stay", -12.655348777770996 ], [ "▁feminine", -12.655352592468262 ], [ "▁întâmplă", -12.655437469482422 ], [ "▁insult", -12.65562915802002 ], [ "▁chocolat", -12.65567398071289 ], [ "▁noroc", 
-12.655750274658203 ], [ "▁centr", -12.655781745910645 ], [ "▁Bühne", -12.655858039855957 ], [ "mighty", -12.6558837890625 ], [ "▁Buddha", -12.655908584594727 ], [ "▁parental", -12.655997276306152 ], [ "storm", -12.656451225280762 ], [ "recurring", -12.6565523147583 ], [ "▁luxe", -12.656588554382324 ], [ "niște", -12.656728744506836 ], [ "cuit", -12.656839370727539 ], [ "▁ausgewählt", -12.656880378723145 ], [ "▁dumb", -12.657047271728516 ], [ "IPS", -12.657127380371094 ], [ "▁Thir", -12.65717887878418 ], [ "Definitely", -12.657195091247559 ], [ "▁hilarious", -12.657195091247559 ], [ "▁rainbow", -12.657231330871582 ], [ "▁Bravo", -12.657251358032227 ], [ "▁entstanden", -12.657259941101074 ], [ "itorul", -12.657269477844238 ], [ "▁prosperity", -12.657299041748047 ], [ "▁Bord", -12.657336235046387 ], [ "▁familiei", -12.657363891601562 ], [ "▁scade", -12.657425880432129 ], [ "wöhn", -12.657426834106445 ], [ "▁ingrediente", -12.65743637084961 ], [ "RAD", -12.657441139221191 ], [ "▁tăi", -12.657472610473633 ], [ "bours", -12.65747356414795 ], [ "ATI", -12.657540321350098 ], [ "▁Blake", -12.65761661529541 ], [ "▁Implement", -12.657712936401367 ], [ "▁Beziehung", -12.657838821411133 ], [ "finanz", -12.657953262329102 ], [ "intestin", -12.658513069152832 ], [ "ließen", -12.658535957336426 ], [ "▁récent", -12.658594131469727 ], [ "▁laminate", -12.658692359924316 ], [ "▁Hör", -12.65876579284668 ], [ "▁personnalisé", -12.658804893493652 ], [ "edel", -12.65890121459961 ], [ "▁advertisement", -12.658902168273926 ], [ "▁pinterest", -12.658921241760254 ], [ "185", -12.659058570861816 ], [ "identité", -12.65938949584961 ], [ "▁Brick", -12.659408569335938 ], [ "Glu", -12.65941047668457 ], [ "▁attendant", -12.659571647644043 ], [ "▁Flip", -12.659614562988281 ], [ "attracting", -12.659662246704102 ], [ "functional", -12.659703254699707 ], [ "conceived", -12.659772872924805 ], [ "▁summarize", -12.659773826599121 ], [ "adjusting", -12.659809112548828 ], [ "CAL", -12.660041809082031 ], [ "▁Operating", -12.660076141357422 ], [ "zzi", -12.66008472442627 ], [ "▁Rover", -12.6603364944458 ], [ "▁versuchen", -12.6603364944458 ], [ "▁articulate", -12.660600662231445 ], [ "▁privé", -12.660614013671875 ], [ "▁consequent", -12.660663604736328 ], [ "EAT", -12.660690307617188 ], [ "▁Marsh", -12.660696983337402 ], [ "▁teenage", -12.660717964172363 ], [ "▁Renaissance", -12.660740852355957 ], [ "▁furnizor", -12.660883903503418 ], [ "▁Desert", -12.660894393920898 ], [ "unicipiului", -12.66104793548584 ], [ "▁ulterior", -12.661065101623535 ], [ "▁Ebene", -12.661280632019043 ], [ "▁monkey", -12.661351203918457 ], [ "▁enclosed", -12.661389350891113 ], [ "▁profitability", -12.66139030456543 ], [ "▁Evolution", -12.661628723144531 ], [ "▁adica", -12.661670684814453 ], [ "▁Structure", -12.661709785461426 ], [ "▁primer", -12.661761283874512 ], [ "▁asigură", -12.662001609802246 ], [ "▁Manuel", -12.662220001220703 ], [ "polita", -12.662267684936523 ], [ "▁Portable", -12.662286758422852 ], [ "fecți", -12.662413597106934 ], [ "▁obscure", -12.662424087524414 ], [ "▁Atlas", -12.662436485290527 ], [ "fährt", -12.662679672241211 ], [ "▁clinician", -12.662837982177734 ], [ "fuhr", -12.66310977935791 ], [ "▁matériaux", -12.663113594055176 ], [ "écrire", -12.663142204284668 ], [ "▁suspicious", -12.6632080078125 ], [ "pore", -12.663263320922852 ], [ "▁outdated", -12.663304328918457 ], [ "▁Mädchen", -12.663328170776367 ], [ "rcis", -12.663420677185059 ], [ "nicht", -12.663463592529297 ], [ "holding", -12.663561820983887 ], [ "▁heavier", 
-12.66366195678711 ], [ "ezimal", -12.663960456848145 ], [ "▁silicone", -12.66397476196289 ], [ "punerea", -12.664108276367188 ], [ "▁begeistert", -12.664237976074219 ], [ "2004", -12.664283752441406 ], [ "▁predecessor", -12.664299011230469 ], [ "▁overlap", -12.664369583129883 ], [ "▁digging", -12.664376258850098 ], [ "▁Upgrade", -12.664407730102539 ], [ "▁interesat", -12.664543151855469 ], [ "▁spinach", -12.66456127166748 ], [ "▁politice", -12.664626121520996 ], [ "activity", -12.664831161499023 ], [ "▁Rating", -12.66484546661377 ], [ "▁serrure", -12.664846420288086 ], [ "▁tânăr", -12.664959907531738 ], [ "▁WHAT", -12.664970397949219 ], [ "▁railroad", -12.664989471435547 ], [ "▁avid", -12.665081024169922 ], [ "▁Sophie", -12.665084838867188 ], [ "preferably", -12.665173530578613 ], [ "▁Fourth", -12.665431022644043 ], [ "kommenden", -12.665452003479004 ], [ "QUI", -12.665478706359863 ], [ "lohn", -12.665505409240723 ], [ "▁promis", -12.665611267089844 ], [ "▁shrub", -12.665621757507324 ], [ "nummer", -12.66579818725586 ], [ "▁dinosaur", -12.665922164916992 ], [ "▁Lucky", -12.665937423706055 ], [ "relates", -12.666038513183594 ], [ "▁FROM", -12.666049003601074 ], [ "▁racism", -12.66610336303711 ], [ "physical", -12.66611385345459 ], [ "alcoholic", -12.666119575500488 ], [ "▁reef", -12.666126251220703 ], [ "▁centru", -12.66618824005127 ], [ "université", -12.66622257232666 ], [ "▁visage", -12.666232109069824 ], [ "ităţile", -12.666253089904785 ], [ "▁Gent", -12.666345596313477 ], [ "zugeben", -12.66643238067627 ], [ "▁paradise", -12.66646957397461 ], [ "fuel", -12.666505813598633 ], [ "ografie", -12.666568756103516 ], [ "▁TIP", -12.666730880737305 ], [ "schreibung", -12.66683292388916 ], [ "▁bark", -12.666840553283691 ], [ "accéder", -12.666895866394043 ], [ "▁contamination", -12.666937828063965 ], [ "▁swelling", -12.666950225830078 ], [ "▁optimistic", -12.666974067687988 ], [ "▁differential", -12.667015075683594 ], [ "▁Arad", -12.667030334472656 ], [ "toxins", -12.667075157165527 ], [ "▁übernehmen", -12.667091369628906 ], [ "▁anime", -12.667143821716309 ], [ "actuel", -12.667462348937988 ], [ "▁bientôt", -12.667525291442871 ], [ "▁Patio", -12.66761302947998 ], [ "▁baisse", -12.667630195617676 ], [ "▁sprint", -12.66773796081543 ], [ "▁bilden", -12.66811466217041 ], [ "VAL", -12.668132781982422 ], [ "▁réflexion", -12.668220520019531 ], [ "hopping", -12.668242454528809 ], [ "genesis", -12.66834545135498 ], [ "achtet", -12.668435096740723 ], [ "▁chinois", -12.668525695800781 ], [ "▁dezvoltat", -12.668795585632324 ], [ "arguably", -12.66884708404541 ], [ "▁Protocol", -12.66884708404541 ], [ "▁Sterling", -12.668862342834473 ], [ "▁Cave", -12.668975830078125 ], [ "▁Condo", -12.66921615600586 ], [ "▁erhöht", -12.669235229492188 ], [ "typische", -12.669416427612305 ], [ "merged", -12.669439315795898 ], [ "▁accumulation", -12.669560432434082 ], [ "sicherlich", -12.669569969177246 ], [ "kW", -12.669620513916016 ], [ "▁schriftlich", -12.669757843017578 ], [ "▁Vorteile", -12.669918060302734 ], [ "▁Northeast", -12.669922828674316 ], [ "frunt", -12.669941902160645 ], [ "istik", -12.670003890991211 ], [ "erster", -12.670035362243652 ], [ "▁Assistance", -12.670150756835938 ], [ "▁Fantastic", -12.670150756835938 ], [ "▁bărbat", -12.670150756835938 ], [ "▁Grinding", -12.670151710510254 ], [ "▁diffusion", -12.670161247253418 ], [ "▁vreun", -12.670331954956055 ], [ "▁Butler", -12.670342445373535 ], [ "▁Cherry", -12.670352935791016 ], [ "▁visualization", -12.670540809631348 ], [ "Paket", -12.670572280883789 ], [ 
"blin", -12.670619010925293 ], [ "▁cadou", -12.670705795288086 ], [ "▁Celtic", -12.670754432678223 ], [ "alegerea", -12.670894622802734 ], [ "▁Dorf", -12.671035766601562 ], [ "▁Noir", -12.671185493469238 ], [ "payment", -12.67126750946045 ], [ "▁Caroline", -12.671334266662598 ], [ "▁Berry", -12.671359062194824 ], [ "▁professeur", -12.67147445678711 ], [ "▁gratuitement", -12.671503067016602 ], [ "Suntem", -12.671523094177246 ], [ "IAN", -12.671738624572754 ], [ "▁fingerprint", -12.671780586242676 ], [ "▁controversy", -12.671781539916992 ], [ "▁fled", -12.671875 ], [ "▁Pokémon", -12.67210865020752 ], [ "excluding", -12.67211627960205 ], [ "▁friction", -12.672161102294922 ], [ "therapie", -12.67225456237793 ], [ "/7", -12.672398567199707 ], [ "▁designation", -12.672442436218262 ], [ "▁Belgia", -12.672704696655273 ], [ "▁cursuri", -12.672836303710938 ], [ "model", -12.672840118408203 ], [ "super", -12.672987937927246 ], [ "▁réduit", -12.673028945922852 ], [ "▁implicit", -12.673177719116211 ], [ "athlon", -12.673227310180664 ], [ "anniversaire", -12.673416137695312 ], [ "▁teaspoon", -12.673416137695312 ], [ "▁corrosion", -12.673418998718262 ], [ "▁überzeugt", -12.673418998718262 ], [ "▁flawless", -12.673421859741211 ], [ "▁vegetation", -12.673477172851562 ], [ "▁iarna", -12.673507690429688 ], [ "▁psychologist", -12.673591613769531 ], [ "hora", -12.673625946044922 ], [ "gab", -12.67387580871582 ], [ "▁soothing", -12.674084663391113 ], [ "▁stew", -12.674141883850098 ], [ "▁wager", -12.674172401428223 ], [ "▁tinere", -12.674322128295898 ], [ "▁baut", -12.674323081970215 ], [ "ecunoscut", -12.674352645874023 ], [ "gearbeitet", -12.674422264099121 ], [ "▁functi", -12.674480438232422 ], [ "▁dürfte", -12.674724578857422 ], [ "▁média", -12.674724578857422 ], [ "▁campanie", -12.67475700378418 ], [ "▁Distribu", -12.674817085266113 ], [ "▁mentoring", -12.674959182739258 ], [ "▁criz", -12.675020217895508 ], [ "findest", -12.675056457519531 ], [ "▁Vasile", -12.675058364868164 ], [ "▁compassionate", -12.675115585327148 ], [ "▁Tudor", -12.675140380859375 ], [ "▁flare", -12.675260543823242 ], [ "intreaga", -12.675283432006836 ], [ "gaz", -12.6753511428833 ], [ "▁porcelain", -12.675379753112793 ], [ "▁expedition", -12.675520896911621 ], [ "▁Azure", -12.67553997039795 ], [ "räumen", -12.675549507141113 ], [ "eiro", -12.675567626953125 ], [ "variante", -12.675804138183594 ], [ "▁Lucy", -12.675825119018555 ], [ "ôle", -12.675909996032715 ], [ "▁revenir", -12.67602252960205 ], [ "▁stained", -12.676040649414062 ], [ "▁falsch", -12.676166534423828 ], [ "▁incorpor", -12.676166534423828 ], [ "merkt", -12.676187515258789 ], [ "▁achten", -12.6762056350708 ], [ "▁hello", -12.676290512084961 ], [ "selben", -12.676422119140625 ], [ "ifty", -12.676525115966797 ], [ "▁Feier", -12.67653751373291 ], [ "1.000", -12.676557540893555 ], [ "▁Patch", -12.676583290100098 ], [ "peptid", -12.676846504211426 ], [ "▁recovering", -12.676898956298828 ], [ "Symptom", -12.677020072937012 ], [ "▁Auckland", -12.677020072937012 ], [ "▁retrieve", -12.677328109741211 ], [ "▁800-", -12.67733097076416 ], [ "schlagen", -12.677473068237305 ], [ "▁lourd", -12.677562713623047 ], [ "▁Purple", -12.67760181427002 ], [ "▁mittels", -12.677776336669922 ], [ "▁Düsseldorf", -12.67800521850586 ], [ "▁getaway", -12.67803955078125 ], [ "▁Cedar", -12.678061485290527 ], [ "▁Function", -12.678241729736328 ], [ "▁bizarre", -12.67833423614502 ], [ "4.3", -12.67849063873291 ], [ "▁fundraiser", -12.67866325378418 ], [ "geared", -12.678780555725098 ], [ "▁privée", 
-12.678781509399414 ], [ "▁Bonjour", -12.67894458770752 ], [ "Gar", -12.67895793914795 ], [ "▁Lloyd", -12.678991317749023 ], [ "▁Reinigung", -12.6790132522583 ], [ "▁Geno", -12.679155349731445 ], [ "▁Teilnahme", -12.67919635772705 ], [ "pian", -12.679362297058105 ], [ "sammelt", -12.679368019104004 ], [ "Pad", -12.679755210876465 ], [ "▁Troy", -12.67976188659668 ], [ "HG", -12.679943084716797 ], [ "▁klein", -12.679962158203125 ], [ "▁lettuce", -12.679978370666504 ], [ "▁patrimoine", -12.679978370666504 ], [ "▁cooker", -12.680055618286133 ], [ "▁accesibil", -12.680137634277344 ], [ "▁Spray", -12.680201530456543 ], [ "▁negotiation", -12.68047046661377 ], [ "▁jewel", -12.680480003356934 ], [ "▁dynamique", -12.68063735961914 ], [ "▁plastique", -12.68067741394043 ], [ "▁Limo", -12.680682182312012 ], [ "▁Funk", -12.68069076538086 ], [ "▁omului", -12.680702209472656 ], [ "title", -12.680768013000488 ], [ "curved", -12.68082046508789 ], [ "▁Lemon", -12.680851936340332 ], [ "förder", -12.680891990661621 ], [ "▁bewusst", -12.681112289428711 ], [ "inevitably", -12.681296348571777 ], [ "▁derivative", -12.681297302246094 ], [ "2:30", -12.681300163269043 ], [ "komfort", -12.681305885314941 ], [ "original", -12.681480407714844 ], [ "sanct", -12.681540489196777 ], [ "▁matte", -12.6815767288208 ], [ "empêche", -12.681628227233887 ], [ "▁jucător", -12.681634902954102 ], [ "▁attentive", -12.681640625 ], [ "▁recunoscut", -12.681674003601074 ], [ "▁Brush", -12.68167495727539 ], [ "▁consommateur", -12.68183422088623 ], [ "érence", -12.682063102722168 ], [ "typical", -12.682084083557129 ], [ "strategie", -12.682205200195312 ], [ "Effekt", -12.682290077209473 ], [ "▁Alcohol", -12.682292938232422 ], [ "oji", -12.682333946228027 ], [ "▁ruler", -12.682357788085938 ], [ "▁Norwegian", -12.682615280151367 ], [ "▁PlayStation", -12.682615280151367 ], [ "▁Hook", -12.682747840881348 ], [ "▁viewpoint", -12.682759284973145 ], [ "THER", -12.682841300964355 ], [ "420", -12.682888984680176 ], [ "Consequently", -12.68294620513916 ], [ "▁entschieden", -12.68294620513916 ], [ "▁Trag", -12.68295669555664 ], [ "▁Dawn", -12.683003425598145 ], [ "▁fuss", -12.68301773071289 ], [ "*****", -12.683040618896484 ], [ "▁Bullet", -12.683140754699707 ], [ "CAM", -12.683155059814453 ], [ "▁wonderfully", -12.683201789855957 ], [ "▁parlamentar", -12.683263778686523 ], [ "▁geometric", -12.683307647705078 ], [ "talement", -12.683321952819824 ], [ "/2018", -12.683577537536621 ], [ "▁oversight", -12.684036254882812 ], [ "kindly", -12.684080123901367 ], [ "therm", -12.684305191040039 ], [ "▁treaba", -12.6846342086792 ], [ "▁Trim", -12.68471908569336 ], [ "▁intelege", -12.684842109680176 ], [ "cino", -12.685032844543457 ], [ "▁straw", -12.68508529663086 ], [ "Tru", -12.685251235961914 ], [ "▁Television", -12.68530559539795 ], [ "Trader", -12.68538761138916 ], [ "▁Passion", -12.685394287109375 ], [ "rescu", -12.685622215270996 ], [ "Nicol", -12.685635566711426 ], [ "luj", -12.685805320739746 ], [ "▁mijloace", -12.685921669006348 ], [ "▁Removal", -12.685922622680664 ], [ "▁1944", -12.686034202575684 ], [ "▁shortcut", -12.686159133911133 ], [ "▁Fett", -12.686258316040039 ], [ "largement", -12.686371803283691 ], [ "▁altern", -12.686446189880371 ], [ "▁cleansing", -12.686562538146973 ], [ "▁Qatar", -12.686692237854004 ], [ "▁Ceci", -12.686826705932617 ], [ "▁weave", -12.686848640441895 ], [ "schmerz", -12.686878204345703 ], [ "▁dots", -12.686888694763184 ], [ "Télécharger", -12.68691635131836 ], [ "▁Conduct", -12.686944007873535 ], [ "bekannten", 
-12.687325477600098 ], [ "▁lungime", -12.687344551086426 ], [ "▁Ferrari", -12.687390327453613 ], [ "▁totusi", -12.687605857849121 ], [ "▁Anniversary", -12.687911033630371 ], [ "▁wilderness", -12.687911987304688 ], [ "▁Christoph", -12.687939643859863 ], [ "▁Nikon", -12.688112258911133 ], [ "▁Digi", -12.68818473815918 ], [ "▁Blumen", -12.688190460205078 ], [ "▁altul", -12.688249588012695 ], [ "▁Parish", -12.688321113586426 ], [ "czy", -12.688393592834473 ], [ "▁temper", -12.688401222229004 ], [ "▁Powder", -12.688576698303223 ], [ "▁Arnold", -12.688577651977539 ], [ "capacitatea", -12.688687324523926 ], [ "nderungen", -12.688787460327148 ], [ "▁utilization", -12.688859939575195 ], [ "99%", -12.688942909240723 ], [ "▁Fear", -12.689099311828613 ], [ "JE", -12.689165115356445 ], [ "▁Simpson", -12.689239501953125 ], [ "▁Podcast", -12.68924617767334 ], [ "▁Cardinal", -12.689290046691895 ], [ "▁Distribution", -12.689315795898438 ], [ "▁Drawing", -12.689373970031738 ], [ "▁tint", -12.689412117004395 ], [ "▁hran", -12.68945598602295 ], [ "▁Slide", -12.68960189819336 ], [ "▁Vertrauen", -12.689654350280762 ], [ "cloth", -12.68971061706543 ], [ "▁redirect", -12.689728736877441 ], [ "126", -12.689842224121094 ], [ "▁constituie", -12.68985652923584 ], [ "Mai", -12.690070152282715 ], [ "▁idol", -12.690088272094727 ], [ "▁tehnice", -12.690163612365723 ], [ "dip", -12.690393447875977 ], [ "▁soldier", -12.690400123596191 ], [ "▁Ordin", -12.690409660339355 ], [ "wobe", -12.69050407409668 ], [ "▁Brent", -12.69058895111084 ], [ "▁Sudan", -12.690597534179688 ], [ "6000", -12.690619468688965 ], [ "turism", -12.690689086914062 ], [ "▁Rocky", -12.690744400024414 ], [ "naming", -12.69092082977295 ], [ "▁entrepreneurial", -12.690925598144531 ], [ "hearted", -12.690962791442871 ], [ "ayne", -12.69097900390625 ], [ "▁hover", -12.691081047058105 ], [ "▁skull", -12.691279411315918 ], [ "▁tribal", -12.691407203674316 ], [ "▁crafting", -12.691543579101562 ], [ "bewertungen", -12.691569328308105 ], [ "▁decizii", -12.691625595092773 ], [ "obwohl", -12.691655158996582 ], [ "▁compromised", -12.691875457763672 ], [ "▁quelqu", -12.69195556640625 ], [ "▁Hilton", -12.692075729370117 ], [ "▁maturity", -12.692095756530762 ], [ "gelesen", -12.692100524902344 ], [ "▁harbor", -12.69210433959961 ], [ "▁maple", -12.692326545715332 ], [ "▁développ", -12.6924409866333 ], [ "▁Nobody", -12.692517280578613 ], [ "équipement", -12.69255542755127 ], [ "121", -12.69274616241455 ], [ "140", -12.692827224731445 ], [ "▁artistes", -12.692914962768555 ], [ "▁depune", -12.692941665649414 ], [ "▁erase", -12.693129539489746 ], [ "▁erzählt", -12.693197250366211 ], [ "▁Hyundai", -12.69323444366455 ], [ "▁impairment", -12.69323444366455 ], [ "▁conving", -12.693279266357422 ], [ "chasing", -12.693426132202148 ], [ "▁Claus", -12.693438529968262 ], [ "▁adaptée", -12.693687438964844 ], [ "▁Raz", -12.693740844726562 ], [ "rugs", -12.693796157836914 ], [ "▁urme", -12.69387435913086 ], [ "Nonetheless", -12.693902015686035 ], [ "▁Cemetery", -12.693902969360352 ], [ "umps", -12.693906784057617 ], [ "ACA", -12.694003105163574 ], [ "▁perioade", -12.694235801696777 ], [ "▁slogan", -12.694263458251953 ], [ "▁downward", -12.694441795349121 ], [ "eidig", -12.694446563720703 ], [ "RAC", -12.69444751739502 ], [ "▁inaugur", -12.694496154785156 ], [ "се", -12.694588661193848 ], [ "▁înțeleg", -12.694608688354492 ], [ "▁hopeful", -12.694635391235352 ], [ "▁customization", -12.6946439743042 ], [ "▁prisoners", -12.694708824157715 ], [ "▁Rau", -12.695270538330078 ], [ "▁Pitt", 
-12.695389747619629 ], [ "ături", -12.695542335510254 ], [ "▁metabolic", -12.695842742919922 ], [ "▁Zach", -12.695868492126465 ], [ "▁umfassende", -12.695914268493652 ], [ "▁révél", -12.695950508117676 ], [ "131", -12.696052551269531 ], [ "ismului", -12.696062088012695 ], [ "▁Sac", -12.696076393127441 ], [ "efficacité", -12.69624137878418 ], [ "cruci", -12.69625473022461 ], [ "bisschen", -12.69632339477539 ], [ "▁Oster", -12.696324348449707 ], [ "lowered", -12.6964693069458 ], [ "▁Ausland", -12.69674015045166 ], [ "▁Pub", -12.696794509887695 ], [ "▁Marseille", -12.696925163269043 ], [ "▁Charter", -12.696959495544434 ], [ "howcasing", -12.697010040283203 ], [ "risti", -12.6971435546875 ], [ "▁thermostat", -12.697151184082031 ], [ "▁Clin", -12.697233200073242 ], [ "▁entsteht", -12.697246551513672 ], [ "Choosing", -12.697248458862305 ], [ "▁Schmerz", -12.697284698486328 ], [ "▁Till", -12.697307586669922 ], [ "▁Polo", -12.697399139404297 ], [ "▁proceduri", -12.697402000427246 ], [ "▁Believe", -12.697444915771484 ], [ "▁playful", -12.697514533996582 ], [ "▁verändert", -12.697588920593262 ], [ "▁pairing", -12.697654724121094 ], [ "MAG", -12.69784927368164 ], [ "leiste", -12.69788932800293 ], [ "▁testimonial", -12.697916030883789 ], [ "▁Economy", -12.697916984558105 ], [ "▁Wechsel", -12.697918891906738 ], [ "wirkung", -12.69801139831543 ], [ "▁exceeded", -12.698030471801758 ], [ "South", -12.698067665100098 ], [ "create", -12.698221206665039 ], [ "▁davantage", -12.698270797729492 ], [ "Log", -12.69831657409668 ], [ "▁irregular", -12.698587417602539 ], [ "VB", -12.698691368103027 ], [ "▁Rö", -12.698741912841797 ], [ "▁intreb", -12.698881149291992 ], [ "▁penser", -12.698920249938965 ], [ "▁déclaré", -12.698923110961914 ], [ "▁Tommy", -12.699026107788086 ], [ "2,500", -12.699163436889648 ], [ "▁Uganda", -12.699260711669922 ], [ "contacting", -12.699445724487305 ], [ "▁apreciat", -12.699485778808594 ], [ "▁beginnen", -12.6995210647583 ], [ "▁Gain", -12.699580192565918 ], [ "Office", -12.69969654083252 ], [ "ermittlung", -12.699710845947266 ], [ "▁Admission", -12.699727058410645 ], [ "▁Earl", -12.6997652053833 ], [ "▁Aviation", -12.699833869934082 ], [ "▁apologize", -12.699929237365723 ], [ "▁enclosure", -12.699929237365723 ], [ "▁Lack", -12.69998836517334 ], [ "wife", -12.699995994567871 ], [ "▁rotating", -12.700016975402832 ], [ "▁hergestellt", -12.700020790100098 ], [ "▁repository", -12.70002269744873 ], [ "TK", -12.700149536132812 ], [ "▁lectur", -12.700190544128418 ], [ "▁reflex", -12.700286865234375 ], [ "▁Harmon", -12.700401306152344 ], [ "▁vrem", -12.700479507446289 ], [ "▁Strange", -12.70055103302002 ], [ "▁champagne", -12.700615882873535 ], [ "▁oscil", -12.700647354125977 ], [ "sensitive", -12.700677871704102 ], [ "▁Sheriff", -12.700841903686523 ], [ "PRES", -12.700956344604492 ], [ "▁vow", -12.70123291015625 ], [ "▁dioxide", -12.701276779174805 ], [ "ен", -12.701374053955078 ], [ "▁corpului", -12.701376914978027 ], [ "▁prevăzut", -12.70160961151123 ], [ "India", -12.701827049255371 ], [ "hausse", -12.70189094543457 ], [ "▁clienți", -12.701957702636719 ], [ "▁entour", -12.70202350616455 ], [ "▁Sharp", -12.70209789276123 ], [ "▁teatru", -12.702285766601562 ], [ "▁Grow", -12.702327728271484 ], [ "▁caravan", -12.70234203338623 ], [ "▁sieben", -12.702420234680176 ], [ "▁cunosc", -12.702502250671387 ], [ "Bereichen", -12.702527046203613 ], [ "▁Benutzer", -12.702619552612305 ], [ "▁Ethiopia", -12.702619552612305 ], [ "▁Physics", -12.702619552612305 ], [ "preserving", -12.70263385772705 ], [ "ал", 
-12.702712059020996 ], [ "▁aerial", -12.70272159576416 ], [ "▁nouvel", -12.702741622924805 ], [ "▁stamped", -12.702954292297363 ], [ "▁inaugural", -12.702970504760742 ], [ "▁medicinal", -12.702999114990234 ], [ "Quite", -12.703028678894043 ], [ "accumulated", -12.703165054321289 ], [ "register", -12.703271865844727 ], [ "▁Falcon", -12.70327377319336 ], [ "▁boiling", -12.703301429748535 ], [ "▁advertised", -12.703339576721191 ], [ "collect", -12.703362464904785 ], [ "albeit", -12.703418731689453 ], [ "▁Organis", -12.703473091125488 ], [ "luate", -12.703536033630371 ], [ "▁préféré", -12.70369815826416 ], [ "▁frumoasa", -12.703968048095703 ], [ "▁truc", -12.704092979431152 ], [ "▁Fä", -12.704154968261719 ], [ "▁dome", -12.704180717468262 ], [ "Mobile", -12.704191207885742 ], [ "▁redeem", -12.704198837280273 ], [ "IONS", -12.70422077178955 ], [ "▁țări", -12.704235076904297 ], [ "▁singular", -12.704385757446289 ], [ "▁livestock", -12.704425811767578 ], [ "▁démont", -12.704427719116211 ], [ "clés", -12.704527854919434 ], [ "music", -12.704561233520508 ], [ "▁explicat", -12.704602241516113 ], [ "▁Fellowship", -12.704703330993652 ], [ "▁electrode", -12.704760551452637 ], [ "129", -12.704977035522461 ], [ "▁Rescue", -12.704983711242676 ], [ "▁Rocket", -12.705159187316895 ], [ "OSE", -12.705301284790039 ], [ "▁Sacramento", -12.705317497253418 ], [ "▁Haiti", -12.705357551574707 ], [ "▁Erwachsene", -12.705390930175781 ], [ "▁Terminal", -12.70541000366211 ], [ "URI", -12.705453872680664 ], [ "▁Rural", -12.70549201965332 ], [ "▁achizitiona", -12.70552921295166 ], [ "▁identifiable", -12.705655097961426 ], [ "▁gekauft", -12.705659866333008 ], [ "▁improper", -12.705673217773438 ], [ "lashes", -12.705751419067383 ], [ "vorbim", -12.705751419067383 ], [ "▁hinder", -12.705862045288086 ], [ "▁Grenz", -12.705878257751465 ], [ "Nav", -12.705955505371094 ], [ "alimentation", -12.705972671508789 ], [ "▁Cottage", -12.7059965133667 ], [ "▁nötig", -12.706197738647461 ], [ "▁cuprinde", -12.70622444152832 ], [ "session", -12.706256866455078 ], [ "▁Separat", -12.70634651184082 ], [ "▁besuchen", -12.706672668457031 ], [ "▁noodles", -12.706684112548828 ], [ "▁ballet", -12.706696510314941 ], [ "WG", -12.706731796264648 ], [ "▁Duty", -12.706871032714844 ], [ "▁porc", -12.706944465637207 ], [ "▁booster", -12.70698356628418 ], [ "galerie", -12.707056045532227 ], [ "▁Lance", -12.707119941711426 ], [ "▁déplac", -12.707178115844727 ], [ "▁rugby", -12.707240104675293 ], [ "▁upholstery", -12.707345962524414 ], [ "▁bustl", -12.70736312866211 ], [ "▁Dealer", -12.70740032196045 ], [ "▁genome", -12.707414627075195 ], [ "▁citizenship", -12.707466125488281 ], [ "rora", -12.707515716552734 ], [ "ARK", -12.707776069641113 ], [ "▁Semi", -12.707820892333984 ], [ "▁Improvement", -12.707892417907715 ], [ "▁negru", -12.708142280578613 ], [ "▁Bruxelles", -12.70836067199707 ], [ "flüge", -12.70837688446045 ], [ "▁Technique", -12.708392143249512 ], [ "▁Obst", -12.708413124084473 ], [ "2020", -12.708560943603516 ], [ "▁gek", -12.708593368530273 ], [ "▁drepturi", -12.708600997924805 ], [ "▁Logan", -12.708605766296387 ], [ "gelöst", -12.70863151550293 ], [ "▁grandparents", -12.708702087402344 ], [ "phin", -12.708950996398926 ], [ "▁dwell", -12.709037780761719 ], [ "▁Nobel", -12.709151268005371 ], [ "dial", -12.70927906036377 ], [ "▁spontan", -12.709344863891602 ], [ "advancing", -12.70937728881836 ], [ "starring", -12.70947551727295 ], [ "▁astea", -12.709498405456543 ], [ "igueur", -12.709638595581055 ], [ "▁Ancient", -12.709700584411621 ], [ 
"filter", -12.70971965789795 ], [ "Doar", -12.709758758544922 ], [ "▁Workers", -12.709759712219238 ], [ "Certainly", -12.709906578063965 ], [ "▁commencé", -12.709914207458496 ], [ "▁zipper", -12.710001945495605 ], [ "▁Selection", -12.710070610046387 ], [ "▁succ", -12.710280418395996 ], [ "headed", -12.710345268249512 ], [ "RIA", -12.710350036621094 ], [ "▁papa", -12.710366249084473 ], [ "▁profesionale", -12.710394859313965 ], [ "▁Zeichen", -12.710402488708496 ], [ "▁artisans", -12.710489273071289 ], [ "▁Geist", -12.710585594177246 ], [ "practic", -12.710741996765137 ], [ "▁ministrul", -12.71076488494873 ], [ "viens", -12.710912704467773 ], [ "prezintă", -12.710919380187988 ], [ "Integrated", -12.710981369018555 ], [ "▁rooftop", -12.710989952087402 ], [ "▁successor", -12.710991859436035 ], [ "OTO", -12.711012840270996 ], [ "liés", -12.711027145385742 ], [ "▁Diver", -12.71121597290039 ], [ "Specifically", -12.711297988891602 ], [ "▁calibr", -12.711301803588867 ], [ "KK", -12.711341857910156 ], [ "▁défense", -12.711414337158203 ], [ "▁english", -12.711414337158203 ], [ "verbrauch", -12.711418151855469 ], [ "▁attire", -12.711433410644531 ], [ "▁Recipe", -12.711441040039062 ], [ "équilibre", -12.711457252502441 ], [ "accumul", -12.71157169342041 ], [ "▁financement", -12.71169662475586 ], [ "rij", -12.711962699890137 ], [ "▁prince", -12.711999893188477 ], [ "▁préparer", -12.7120361328125 ], [ "surviving", -12.71211051940918 ], [ "operation", -12.712233543395996 ], [ "▁judet", -12.71242904663086 ], [ "▁Verantwortung", -12.712433815002441 ], [ "▁Vinyl", -12.712536811828613 ], [ "DEN", -12.712584495544434 ], [ "▁Tail", -12.712589263916016 ], [ "yearly", -12.712590217590332 ], [ "▁comisi", -12.712613105773926 ], [ "lava", -12.71261978149414 ], [ "▁succession", -12.71264934539795 ], [ "▁Whisk", -12.713030815124512 ], [ "▁precizat", -12.713096618652344 ], [ "▁unmittelbar", -12.713117599487305 ], [ "ICH", -12.713139533996582 ], [ "▁atteint", -12.713199615478516 ], [ "▁hometown", -12.713268280029297 ], [ "▁Zip", -12.71328353881836 ], [ "▁Weekly", -12.71336841583252 ], [ "▁crashes", -12.713401794433594 ], [ "▁Turbo", -12.713421821594238 ], [ "▁susține", -12.713468551635742 ], [ "▁Venus", -12.713587760925293 ], [ "▁finalement", -12.713595390319824 ], [ "rewarded", -12.713693618774414 ], [ "▁principau", -12.713899612426758 ], [ "▁régional", -12.713979721069336 ], [ "▁1958", -12.714178085327148 ], [ "▁Musical", -12.714189529418945 ], [ "▁stylist", -12.714251518249512 ], [ "cetate", -12.714282035827637 ], [ "gorge", -12.71433162689209 ], [ "▁espresso", -12.714493751525879 ], [ "überall", -12.714576721191406 ], [ "▁NHL", -12.714593887329102 ], [ "▁Dock", -12.71472454071045 ], [ "▁mosquito", -12.71481704711914 ], [ "▁forthcoming", -12.714852333068848 ], [ "▁Visitors", -12.714881896972656 ], [ "kro", -12.714882850646973 ], [ "_______", -12.715048789978027 ], [ "▁STEM", -12.715105056762695 ], [ "9.5", -12.715141296386719 ], [ "accompagne", -12.715177536010742 ], [ "▁Trick", -12.715202331542969 ], [ "▁endorsement", -12.715400695800781 ], [ "▁amplifier", -12.715498924255371 ], [ "▁malicious", -12.715499877929688 ], [ "▁roam", -12.71552848815918 ], [ "▁kennt", -12.715635299682617 ], [ "Connor", -12.715690612792969 ], [ "▁dysfunction", -12.715828895568848 ], [ "▁zuverlässig", -12.715840339660645 ], [ "▁corpul", -12.71595573425293 ], [ "▁boule", -12.715967178344727 ], [ "otti", -12.715991973876953 ], [ "440", -12.716050148010254 ], [ "▁mimic", -12.716056823730469 ], [ "farben", -12.716129302978516 ], [ "▁Wagner", 
-12.716214179992676 ], [ "Kom", -12.7162504196167 ], [ "▁miteinander", -12.716269493103027 ], [ "▁String", -12.716296195983887 ], [ "▁Ellis", -12.716313362121582 ], [ "▁Perth", -12.716337203979492 ], [ "▁temperatura", -12.716381072998047 ], [ "umbling", -12.716397285461426 ], [ "▁Medizin", -12.716554641723633 ], [ "▁KY", -12.71660327911377 ], [ "apei", -12.716642379760742 ], [ "counter", -12.716647148132324 ], [ "strich", -12.71665096282959 ], [ "▁Între", -12.716652870178223 ], [ "▁Cliff", -12.716785430908203 ], [ "▁foreclosure", -12.716864585876465 ], [ "................", -12.716878890991211 ], [ "Clearly", -12.717028617858887 ], [ "AJ", -12.717057228088379 ], [ "ndro", -12.717180252075195 ], [ "▁Arsenal", -12.717206001281738 ], [ "▁Recherche", -12.717216491699219 ], [ "Guests", -12.717225074768066 ], [ "▁besucht", -12.717242240905762 ], [ "wissen", -12.717266082763672 ], [ "fekt", -12.717414855957031 ], [ "hottest", -12.717414855957031 ], [ "▁Tomorrow", -12.717547416687012 ], [ "▁Signature", -12.717557907104492 ], [ "127", -12.717583656311035 ], [ "▁competence", -12.71766471862793 ], [ "Einige", -12.717686653137207 ], [ "patented", -12.71782112121582 ], [ "▁Exhibition", -12.717889785766602 ], [ "▁verbessern", -12.717889785766602 ], [ "▁Garcia", -12.718043327331543 ], [ "▁inquire", -12.718278884887695 ], [ "coping", -12.718353271484375 ], [ "▁linguri", -12.71842098236084 ], [ "▁trivia", -12.718433380126953 ], [ "▁începutul", -12.718489646911621 ], [ "▁parteneriat", -12.7186279296875 ], [ "tagen", -12.718636512756348 ], [ "▁engagé", -12.718916893005371 ], [ "▁chalk", -12.718944549560547 ], [ "▁fashionable", -12.719416618347168 ], [ "0.8", -12.719635009765625 ], [ "▁sticker", -12.719751358032227 ], [ "▁desperately", -12.719765663146973 ], [ "höhe", -12.719903945922852 ], [ "▁fericire", -12.71994400024414 ], [ "évaluation", -12.719948768615723 ], [ "▁Divide", -12.719959259033203 ], [ "▁indulge", -12.719979286193848 ], [ "fett", -12.720014572143555 ], [ "▁communal", -12.72017765045166 ], [ "▁mindful", -12.720187187194824 ], [ "dauert", -12.720192909240723 ], [ "▁veille", -12.720263481140137 ], [ "▁vér", -12.720330238342285 ], [ "▁Baseball", -12.720373153686523 ], [ "▁succeeded", -12.720418930053711 ], [ "▁Terrasse", -12.720420837402344 ], [ "irgend", -12.720500946044922 ], [ "▁Munich", -12.720556259155273 ], [ "weisung", -12.72067642211914 ], [ "metre", -12.720916748046875 ], [ "▁Raymond", -12.721015930175781 ], [ "▁chute", -12.72102165222168 ], [ "▁Accounting", -12.721075057983398 ], [ "▁pantry", -12.721122741699219 ], [ "▁underwater", -12.721181869506836 ], [ "ARI", -12.721222877502441 ], [ "lowed", -12.721245765686035 ], [ "numbered", -12.721430778503418 ], [ "REN", -12.72148609161377 ], [ "▁industriel", -12.721489906311035 ], [ "wäh", -12.721531867980957 ], [ "kenntnis", -12.721631050109863 ], [ "▁govern", -12.721635818481445 ], [ "strained", -12.721661567687988 ], [ "▁rythme", -12.721689224243164 ], [ "ин", -12.72169303894043 ], [ "▁burner", -12.721723556518555 ], [ "▁zählt", -12.721790313720703 ], [ "▁verte", -12.721883773803711 ], [ "▁Catalog", -12.721896171569824 ], [ "▁Bruno", -12.721988677978516 ], [ "0.7", -12.721997261047363 ], [ "▁litig", -12.72207260131836 ], [ "▁greet", -12.722129821777344 ], [ "▁stool", -12.722393035888672 ], [ "gression", -12.722457885742188 ], [ "▁Klassen", -12.722491264343262 ], [ "▁neon", -12.722661018371582 ], [ "▁Tall", -12.722734451293945 ], [ "▁satin", -12.722895622253418 ], [ "▁Bend", -12.722915649414062 ], [ "▁soluţi", -12.723077774047852 ], [ 
"▁styl", -12.723196983337402 ], [ "▁Siri", -12.723358154296875 ], [ "▁Sanders", -12.723464012145996 ], [ "▁spike", -12.723499298095703 ], [ "pinion", -12.723854064941406 ], [ "▁purta", -12.724122047424316 ], [ "CARE", -12.724224090576172 ], [ "▁creştere", -12.724311828613281 ], [ "▁fry", -12.724374771118164 ], [ "▁Schweizer", -12.724400520324707 ], [ "durchschnittlich", -12.724411010742188 ], [ "celaşi", -12.724446296691895 ], [ "▁deceased", -12.724474906921387 ], [ "▁Nerv", -12.724668502807617 ], [ "2-2", -12.7247314453125 ], [ "▁Stahl", -12.724753379821777 ], [ "▁workload", -12.724834442138672 ], [ "erhielt", -12.724984169006348 ], [ "▁hypothesis", -12.725103378295898 ], [ "bib", -12.725110054016113 ], [ "▁ţară", -12.725116729736328 ], [ "vaut", -12.725122451782227 ], [ "prehensi", -12.725184440612793 ], [ "▁Offering", -12.725188255310059 ], [ "▁dislike", -12.725252151489258 ], [ "▁firewall", -12.725252151489258 ], [ "mania", -12.725255966186523 ], [ "195", -12.725278854370117 ], [ "▁Champ", -12.725324630737305 ], [ "▁philosophical", -12.725343704223633 ], [ "länge", -12.72553539276123 ], [ "advisable", -12.725785255432129 ], [ "negotiating", -12.725785255432129 ], [ "Providing", -12.725791931152344 ], [ "▁1959", -12.725801467895508 ], [ "▁spyware", -12.725831031799316 ], [ "sharing", -12.725837707519531 ], [ "▁prévoi", -12.725905418395996 ], [ "▁jaune", -12.7260103225708 ], [ "schoss", -12.726028442382812 ], [ "▁obține", -12.726129531860352 ], [ "▁attraktiv", -12.726489067077637 ], [ "gemeinschaft", -12.7265043258667 ], [ "BV", -12.726505279541016 ], [ "Top", -12.726617813110352 ], [ "▁Sharon", -12.726625442504883 ], [ "bok", -12.726675033569336 ], [ "▁résist", -12.726811408996582 ], [ "Napoca", -12.726822853088379 ], [ "▁Uncategorized", -12.726898193359375 ], [ "▁trustee", -12.726936340332031 ], [ "▁remise", -12.727025985717773 ], [ "▁aştept", -12.727165222167969 ], [ "▁allergic", -12.727206230163574 ], [ "èvre", -12.727211952209473 ], [ "LAR", -12.72734546661377 ], [ "1.9", -12.727497100830078 ], [ "▁outbreak", -12.727520942687988 ], [ "▁trocken", -12.727568626403809 ], [ "▁laughter", -12.727724075317383 ], [ "▁Attend", -12.727785110473633 ], [ "jung", -12.727822303771973 ], [ "racking", -12.727934837341309 ], [ "ORS", -12.728178024291992 ], [ "▁rasp", -12.728527069091797 ], [ "VF", -12.728551864624023 ], [ "▁Tamil", -12.72860050201416 ], [ "124", -12.728602409362793 ], [ "▁Fiber", -12.728714942932129 ], [ "▁launches", -12.728755950927734 ], [ "Post", -12.728777885437012 ], [ "▁bucks", -12.729072570800781 ], [ "▁Nicholas", -12.72923755645752 ], [ "▁cărți", -12.729255676269531 ], [ "emper", -12.729681968688965 ], [ "Point", -12.729689598083496 ], [ "fraction", -12.729753494262695 ], [ "▁BIG", -12.729804992675781 ], [ "▁lancer", -12.729829788208008 ], [ "EVER", -12.72997760772705 ], [ "trend", -12.73000431060791 ], [ "▁remerci", -12.730076789855957 ], [ "▁prevalent", -12.730168342590332 ], [ "370", -12.730290412902832 ], [ "▁bestellen", -12.730327606201172 ], [ "Buying", -12.730341911315918 ], [ "▁Aufbau", -12.730416297912598 ], [ "▁opini", -12.730416297912598 ], [ "▁regiune", -12.730663299560547 ], [ "▁martial", -12.73069953918457 ], [ "LK", -12.730754852294922 ], [ "▁Feuerwehr", -12.730974197387695 ], [ "screened", -12.73099422454834 ], [ "Blue", -12.73120403289795 ], [ "▁analize", -12.731237411499023 ], [ "▁lure", -12.731247901916504 ], [ "▁internally", -12.731283187866211 ], [ "father", -12.731322288513184 ], [ "▁diplomatic", -12.731343269348145 ], [ "▁Activity", -12.731464385986328 
], [ "▁cliqu", -12.73156452178955 ], [ "▁adequately", -12.731809616088867 ], [ "▁Elena", -12.73183822631836 ], [ "▁Citizens", -12.732102394104004 ], [ "▁Länge", -12.732295989990234 ], [ "▁respectful", -12.732300758361816 ], [ "▁zuständig", -12.73248291015625 ], [ "▁réception", -12.732584953308105 ], [ "▁headset", -12.732686996459961 ], [ "▁awhile", -12.732705116271973 ], [ "▁speculation", -12.732707977294922 ], [ "▁WhatsApp", -12.732714653015137 ], [ "▁tulbur", -12.732731819152832 ], [ "▁voluntar", -12.732758522033691 ], [ "▁Studium", -12.73277473449707 ], [ "▁protector", -12.732833862304688 ], [ "▁Wrap", -12.732840538024902 ], [ "staat", -12.732951164245605 ], [ "▁judgement", -12.733396530151367 ], [ "unauthorized", -12.733397483825684 ], [ "Rank", -12.733487129211426 ], [ "pră", -12.733503341674805 ], [ "▁Paw", -12.733627319335938 ], [ "▁relev", -12.733664512634277 ], [ "▁arbor", -12.733830451965332 ], [ "stretches", -12.733885765075684 ], [ "nook", -12.733906745910645 ], [ "▁Tunis", -12.733907699584961 ], [ "▁shocking", -12.734036445617676 ], [ "▁oppress", -12.73414421081543 ], [ "10.1", -12.7341890335083 ], [ "▁ERP", -12.734310150146484 ], [ "wolle", -12.7343168258667 ], [ "▁Catch", -12.734352111816406 ], [ "Plus", -12.734368324279785 ], [ "Market", -12.734445571899414 ], [ "scribed", -12.734536170959473 ], [ "▁décoration", -12.734594345092773 ], [ "▁chanson", -12.734607696533203 ], [ "▁Midwest", -12.734763145446777 ], [ "▁Spencer", -12.734795570373535 ], [ "▁societate", -12.734807968139648 ], [ "curated", -12.735087394714355 ], [ "▁canopy", -12.735135078430176 ], [ "ат", -12.735142707824707 ], [ "Sig", -12.73514461517334 ], [ "▁witch", -12.735153198242188 ], [ "envoyer", -12.735175132751465 ], [ "▁$1,000", -12.735230445861816 ], [ "▁peripheral", -12.735482215881348 ], [ "nnouncing", -12.735509872436523 ], [ "perfect", -12.73559284210205 ], [ "▁warten", -12.735748291015625 ], [ "ELI", -12.735822677612305 ], [ "▁recap", -12.735912322998047 ], [ "dün", -12.735978126525879 ], [ "▁Spre", -12.736029624938965 ], [ "2005", -12.736153602600098 ], [ "▁réparation", -12.73617935180664 ], [ "▁extraordinar", -12.736196517944336 ], [ "existence", -12.736337661743164 ], [ "oanele", -12.736467361450195 ], [ "▁reprezentant", -12.736474990844727 ], [ "▁attacker", -12.736490249633789 ], [ "▁Berliner", -12.73657512664795 ], [ "experience", -12.736649513244629 ], [ "▁Monde", -12.736800193786621 ], [ "intervention", -12.736956596374512 ], [ "▁Einstellung", -12.736977577209473 ], [ "▁Valentin", -12.737011909484863 ], [ "▁zonă", -12.737200736999512 ], [ "occupant", -12.737223625183105 ], [ "▁mobilis", -12.737260818481445 ], [ "metall", -12.737261772155762 ], [ "evangeli", -12.73729133605957 ], [ "Adding", -12.737326622009277 ], [ "▁Roland", -12.73735237121582 ], [ "ENCE", -12.737462043762207 ], [ "▁Insul", -12.737478256225586 ], [ "tellement", -12.737497329711914 ], [ "▁Blogger", -12.737499237060547 ], [ "▁prote", -12.737504005432129 ], [ "▁Minimum", -12.737574577331543 ], [ "▁termic", -12.737624168395996 ], [ "▁Sachen", -12.737859725952148 ], [ "▁Maschinen", -12.737863540649414 ], [ "▁Dragnea", -12.737926483154297 ], [ "▁overtime", -12.737967491149902 ], [ "calorie", -12.737968444824219 ], [ "▁jene", -12.73814868927002 ], [ "▁Satan", -12.738153457641602 ], [ "▁currencies", -12.73827075958252 ], [ "▁echipamente", -12.738329887390137 ], [ "▁forgiveness", -12.73843765258789 ], [ "▁Pause", -12.738479614257812 ], [ "▁Witt", -12.738529205322266 ], [ "STOR", -12.738632202148438 ], [ "▁actuelle", -12.738703727722168 
], [ "▁Ard", -12.738853454589844 ], [ "▁Constitu", -12.738880157470703 ], [ "ghan", -12.7388916015625 ], [ "Make", -12.738906860351562 ], [ "▁garne", -12.738947868347168 ], [ "▁Hitler", -12.738956451416016 ], [ "▁rubbish", -12.738973617553711 ], [ "6.0", -12.739025115966797 ], [ "▁Giving", -12.739177703857422 ], [ "▁persever", -12.73937702178955 ], [ "wirk", -12.7394380569458 ], [ "liegenden", -12.739455223083496 ], [ "▁morceau", -12.73946762084961 ], [ "atty", -12.73961067199707 ], [ "▁Quebec", -12.739669799804688 ], [ "harmonie", -12.739705085754395 ], [ "Nummer", -12.739721298217773 ], [ "▁splendid", -12.739747047424316 ], [ "▁halfway", -12.739808082580566 ], [ "▁periodically", -12.740071296691895 ], [ "▁Ländern", -12.740077018737793 ], [ "▁AAA", -12.740083694458008 ], [ "▁Frost", -12.740198135375977 ], [ "▁heroin", -12.740289688110352 ], [ "▁bucurie", -12.7403564453125 ], [ "▁Pradesh", -12.74036693572998 ], [ "zusetzen", -12.740405082702637 ], [ "raising", -12.740425109863281 ], [ "▁furniz", -12.740567207336426 ], [ "▁convi", -12.740575790405273 ], [ "pictured", -12.740911483764648 ], [ "▁inadequate", -12.741065979003906 ], [ "▁aprobat", -12.741069793701172 ], [ "▁exercising", -12.741083145141602 ], [ "▁faisai", -12.741138458251953 ], [ "▁prosecution", -12.741231918334961 ], [ "380", -12.741402626037598 ], [ "▁Potential", -12.74145793914795 ], [ "▁Magi", -12.741523742675781 ], [ "From", -12.741752624511719 ], [ "batterie", -12.74181079864502 ], [ "▁poisson", -12.74185562133789 ], [ "▁Probe", -12.741950988769531 ], [ "▁pastel", -12.741998672485352 ], [ "▁tracked", -12.742410659790039 ], [ "▁advertisers", -12.74251937866211 ], [ "adevar", -12.742537498474121 ], [ "ит", -12.742776870727539 ], [ "▁Herren", -12.742815971374512 ], [ "EAM", -12.742820739746094 ], [ "▁scooter", -12.742822647094727 ], [ "requesting", -12.742841720581055 ], [ "dynamis", -12.742949485778809 ], [ "▁dahin", -12.742961883544922 ], [ "▁tweak", -12.743061065673828 ], [ "▁hail", -12.743101119995117 ], [ "▁întotdeauna", -12.743160247802734 ], [ "▁Publikum", -12.743167877197266 ], [ "▁panoramic", -12.743167877197266 ], [ "▁PRE", -12.74331283569336 ], [ "▁thrill", -12.743361473083496 ], [ "Open", -12.743366241455078 ], [ "▁Layer", -12.74345588684082 ], [ "▁Bosch", -12.743459701538086 ], [ "hull", -12.743511199951172 ], [ "▁născut", -12.743518829345703 ], [ "tausch", -12.743559837341309 ], [ "▁autoturism", -12.743577003479004 ], [ "▁crank", -12.743701934814453 ], [ "CLE", -12.743735313415527 ], [ "▁Frederick", -12.74386978149414 ], [ "mog", -12.743887901306152 ], [ "behalten", -12.74396800994873 ], [ "▁aunt", -12.744050979614258 ], [ "▁Triple", -12.744141578674316 ], [ "▁Ark", -12.744242668151855 ], [ "AUD", -12.744440078735352 ], [ "▁Candy", -12.744505882263184 ], [ "tama", -12.744515419006348 ], [ "▁Evaluation", -12.744571685791016 ], [ "▁Memphis", -12.744571685791016 ], [ "▁stellar", -12.74457836151123 ], [ "▁fabricat", -12.744632720947266 ], [ "▁terminat", -12.744868278503418 ], [ "▁domnul", -12.744913101196289 ], [ "▁keynote", -12.744925498962402 ], [ "▁dentistry", -12.744951248168945 ], [ "rift", -12.745052337646484 ], [ "▁bilan", -12.745119094848633 ], [ "2.6", -12.745125770568848 ], [ "undergoing", -12.745210647583008 ], [ "▁pseudo", -12.745274543762207 ], [ "▁maşin", -12.745280265808105 ], [ "▁munte", -12.74555492401123 ], [ "▁VW", -12.745932579040527 ], [ "▁Rab", -12.74593448638916 ], [ "▁sustine", -12.745972633361816 ], [ "▁Bedingungen", -12.745977401733398 ], [ "▁învăţ", -12.745980262756348 ], [ "▁pyramid", 
-12.745983123779297 ], [ "HEN", -12.746020317077637 ], [ "▁citrus", -12.746058464050293 ], [ "Code", -12.746064186096191 ], [ "▁Beginning", -12.746164321899414 ], [ "▁discourse", -12.746249198913574 ], [ "▁miercuri", -12.746329307556152 ], [ "▁producător", -12.74637508392334 ], [ "▁analys", -12.746397972106934 ], [ "▁Evan", -12.7467041015625 ], [ "138", -12.746987342834473 ], [ "▁târziu", -12.74703311920166 ], [ "▁relocation", -12.747052192687988 ], [ "decizia", -12.74708080291748 ], [ "tollen", -12.74714183807373 ], [ "TRO", -12.747180938720703 ], [ "▁runway", -12.74719524383545 ], [ "illet", -12.747270584106445 ], [ "▁serveur", -12.747387886047363 ], [ "bezogen", -12.747427940368652 ], [ "▁believers", -12.747668266296387 ], [ "determined", -12.747711181640625 ], [ "▁reinforced", -12.74791431427002 ], [ "▁wedge", -12.748006820678711 ], [ "methyl", -12.74807357788086 ], [ "MES", -12.748188018798828 ], [ "vpn", -12.748374938964844 ], [ "▁consta", -12.74837875366211 ], [ "▁vizitat", -12.748420715332031 ], [ "modul", -12.748455047607422 ], [ "▁routing", -12.748528480529785 ], [ "tempted", -12.748540878295898 ], [ "URS", -12.748785018920898 ], [ "apprentissage", -12.748795509338379 ], [ "▁Hungary", -12.748796463012695 ], [ "Previously", -12.74880313873291 ], [ "▁translator", -12.748804092407227 ], [ "▁resonate", -12.748830795288086 ], [ "201", -12.748851776123047 ], [ "3-0", -12.749029159545898 ], [ "▁reunion", -12.749090194702148 ], [ "▁palate", -12.749096870422363 ], [ "0.4", -12.749171257019043 ], [ "reheat", -12.74924373626709 ], [ "Roo", -12.749261856079102 ], [ "200,000", -12.74940013885498 ], [ "Bro", -12.749431610107422 ], [ "▁estimation", -12.749468803405762 ], [ "schneiden", -12.749499320983887 ], [ "▁Inspired", -12.749506950378418 ], [ "▁lottery", -12.749539375305176 ], [ "▁Friedrich", -12.749887466430664 ], [ "FIT", -12.749913215637207 ], [ "0.6", -12.7499418258667 ], [ "▁dagegen", -12.74997615814209 ], [ "▁Reb", -12.750115394592285 ], [ "▁Eigenschaften", -12.75020694732666 ], [ "▁molding", -12.750361442565918 ], [ "▁Harper", -12.750548362731934 ], [ "verwaltung", -12.75055980682373 ], [ "▁Schlüssel", -12.75055980682373 ], [ "▁desfasura", -12.75055980682373 ], [ "▁rencontrer", -12.75055980682373 ], [ "▁negoci", -12.750581741333008 ], [ "▁Leading", -12.750615119934082 ], [ "▁necesita", -12.750652313232422 ], [ "▁biking", -12.750683784484863 ], [ "▁jointly", -12.75069808959961 ], [ "▁crush", -12.750702857971191 ], [ "Vol", -12.750768661499023 ], [ "▁ebay", -12.750836372375488 ], [ "▁Shri", -12.750991821289062 ], [ "▁AMD", -12.751029968261719 ], [ "FG", -12.751032829284668 ], [ "Argentin", -12.75120735168457 ], [ "▁incercat", -12.751431465148926 ], [ "▁tidy", -12.751628875732422 ], [ "▁provoqu", -12.751635551452637 ], [ "▁Written", -12.751649856567383 ], [ "▁Kooperation", -12.751666069030762 ], [ "▁scripture", -12.751952171325684 ], [ "▁Pflicht", -12.751974105834961 ], [ "ficial", -12.752013206481934 ], [ "vremea", -12.752013206481934 ], [ "▁Growing", -12.752115249633789 ], [ "▁redesign", -12.752119064331055 ], [ "▁obstacle", -12.752214431762695 ], [ "▁rugam", -12.752235412597656 ], [ "▁SPD", -12.752243995666504 ], [ "165", -12.752270698547363 ], [ "fiz", -12.752284049987793 ], [ "▁startet", -12.752326011657715 ], [ "▁Principle", -12.752327919006348 ], [ "▁abdominal", -12.752327919006348 ], [ "▁podium", -12.752528190612793 ], [ "duty", -12.752616882324219 ], [ "bonne", -12.752679824829102 ], [ "▁Serbia", -12.752687454223633 ], [ "▁brunch", -12.752839088439941 ], [ "▁Personne", 
-12.752975463867188 ], [ "▁Idea", -12.753034591674805 ], [ "forementioned", -12.753036499023438 ], [ "▁chassis", -12.753037452697754 ], [ "gebühr", -12.753050804138184 ], [ "ucun", -12.753061294555664 ], [ "▁Maz", -12.7531156539917 ], [ "1-4", -12.75318431854248 ], [ "kleid", -12.753273963928223 ], [ "▁Volvo", -12.753337860107422 ], [ "brechen", -12.753378868103027 ], [ "▁homepage", -12.753472328186035 ], [ "fuz", -12.753509521484375 ], [ "▁abgeschlossen", -12.753595352172852 ], [ "▁gelungen", -12.753658294677734 ], [ "▁booklet", -12.753711700439453 ], [ "▁Ukrainian", -12.753745079040527 ], [ "▁Melissa", -12.753746032714844 ], [ "CENT", -12.75379467010498 ], [ "▁intégré", -12.753806114196777 ], [ "weighing", -12.753827095031738 ], [ "▁crumbl", -12.753894805908203 ], [ "▁bunk", -12.754167556762695 ], [ "krieg", -12.754207611083984 ], [ "▁freshman", -12.754307746887207 ], [ "alaya", -12.754339218139648 ], [ "Avem", -12.754353523254395 ], [ "▁Kne", -12.754423141479492 ], [ "▁upstairs", -12.75448226928711 ], [ "AIL", -12.754508972167969 ], [ "țul", -12.75478744506836 ], [ "▁Lecture", -12.754817962646484 ], [ "▁entdecken", -12.754843711853027 ], [ "▁GMT", -12.754912376403809 ], [ "▁Leitung", -12.754937171936035 ], [ "▁inclined", -12.755170822143555 ], [ "▁skillet", -12.75555419921875 ], [ "FN", -12.755742073059082 ], [ "▁Perform", -12.755821228027344 ], [ "shift", -12.75583267211914 ], [ "recognizing", -12.755873680114746 ], [ "▁concise", -12.755873680114746 ], [ "▁obsessed", -12.755873680114746 ], [ "▁removable", -12.755873680114746 ], [ "▁Relax", -12.755888938903809 ], [ "delegates", -12.75605583190918 ], [ "▁expedi", -12.756074905395508 ], [ "▁Schä", -12.756138801574707 ], [ "iete", -12.756211280822754 ], [ "▁reciproc", -12.756229400634766 ], [ "▁neutr", -12.75625228881836 ], [ "lactic", -12.756314277648926 ], [ "▁Nah", -12.756328582763672 ], [ "scene", -12.7565279006958 ], [ "▁Helm", -12.756563186645508 ], [ "▁Bewerbung", -12.756671905517578 ], [ "▁Cassi", -12.75667953491211 ], [ "▁Gelegenheit", -12.756939888000488 ], [ "▁reflective", -12.757140159606934 ], [ "▁încredere", -12.757149696350098 ], [ "▁cigarettes", -12.75717544555664 ], [ "▁Zusätzlich", -12.757295608520508 ], [ "▁intercept", -12.75731372833252 ], [ "▁Finn", -12.757468223571777 ], [ "▁ignor", -12.757661819458008 ], [ "gian", -12.75766372680664 ], [ "BRA", -12.757740020751953 ], [ "leader", -12.757957458496094 ], [ "nius", -12.757981300354004 ], [ "▁skies", -12.757987022399902 ], [ "▁nunta", -12.758023262023926 ], [ "▁grec", -12.758041381835938 ], [ "arranging", -12.75816822052002 ], [ "wartet", -12.758231163024902 ], [ "▁kostet", -12.758377075195312 ], [ "▁Entre", -12.758541107177734 ], [ "Mag", -12.758575439453125 ], [ "▁radiator", -12.758598327636719 ], [ "übrigens", -12.758689880371094 ], [ "Internet", -12.758706092834473 ], [ "▁connexion", -12.758718490600586 ], [ "▁prolonged", -12.758854866027832 ], [ "▁capabil", -12.75914192199707 ], [ "▁feeder", -12.759217262268066 ], [ "Initially", -12.759223937988281 ], [ "Green", -12.75926685333252 ], [ "▁passiert", -12.759272575378418 ], [ "▁courtyard", -12.759299278259277 ], [ "▁judeţ", -12.759320259094238 ], [ "▁Coalition", -12.759431838989258 ], [ "▁atmospheric", -12.759431838989258 ], [ "▁velocity", -12.759431838989258 ], [ "▁Frühstück", -12.759432792663574 ], [ "vacancies", -12.759438514709473 ], [ "unified", -12.759538650512695 ], [ "▁Ahmed", -12.759538650512695 ], [ "poured", -12.759550094604492 ], [ "▁Mikro", -12.75959587097168 ], [ "▁Klar", -12.759661674499512 ], [ "kommt", 
-12.759681701660156 ], [ "seated", -12.759744644165039 ], [ "musik", -12.75976848602295 ], [ "▁stimulation", -12.759841918945312 ], [ "▁solicitat", -12.759880065917969 ], [ "▁politically", -12.760165214538574 ], [ "restoring", -12.760322570800781 ], [ "▁Rag", -12.760435104370117 ], [ "▁officielle", -12.760468482971191 ], [ "▁Annie", -12.760479927062988 ], [ "▁tourne", -12.760634422302246 ], [ "▁Joel", -12.760642051696777 ], [ "blieben", -12.760666847229004 ], [ "▁repayment", -12.760736465454102 ], [ "▁Strategi", -12.760781288146973 ], [ "▁prietenii", -12.760804176330566 ], [ "▁Montgomery", -12.760858535766602 ], [ "▁résidence", -12.760858535766602 ], [ "▁sunglasses", -12.760858535766602 ], [ "▁1956", -12.760882377624512 ], [ "MEN", -12.76093578338623 ], [ "pouvant", -12.760997772216797 ], [ "375", -12.761061668395996 ], [ "directed", -12.761173248291016 ], [ "▁grinder", -12.76120662689209 ], [ "rträge", -12.761279106140137 ], [ "▁nickel", -12.761299133300781 ], [ "▁Maintain", -12.761313438415527 ], [ "▁Holmes", -12.761392593383789 ], [ "▁obtinut", -12.76157283782959 ], [ "▁walnut", -12.761585235595703 ], [ "▁consultancy", -12.761640548706055 ], [ "cooled", -12.761651039123535 ], [ "▁Brig", -12.761711120605469 ], [ "▁Produc", -12.761873245239258 ], [ "street", -12.76187515258789 ], [ "▁Einfach", -12.761897087097168 ], [ "North", -12.762149810791016 ], [ "▁PET", -12.76220989227295 ], [ "▁Président", -12.762288093566895 ], [ "▁produsului", -12.762457847595215 ], [ "literatur", -12.762483596801758 ], [ "133", -12.762561798095703 ], [ "▁recours", -12.762591361999512 ], [ "▁verpflichtet", -12.76264476776123 ], [ "▁Wur", -12.762733459472656 ], [ "▁psiholog", -12.762796401977539 ], [ "Veg", -12.762871742248535 ], [ "▁hype", -12.762930870056152 ], [ "augmenter", -12.762974739074707 ], [ "▁Welsh", -12.763012886047363 ], [ "mounted", -12.763158798217773 ], [ "▁Wann", -12.763425827026367 ], [ "▁gezeigt", -12.763620376586914 ], [ "▁memo", -12.763631820678711 ], [ "veterinary", -12.763717651367188 ], [ "▁Olympia", -12.763717651367188 ], [ "▁handsome", -12.763871192932129 ], [ "yama", -12.763911247253418 ], [ "studio", -12.763912200927734 ], [ "sozial", -12.764020919799805 ], [ "▁reap", -12.764104843139648 ], [ "▁didactic", -12.764111518859863 ], [ "▁Cookie", -12.764126777648926 ], [ "▁cooper", -12.764230728149414 ], [ "▁discern", -12.76441478729248 ], [ "▁Ubuntu", -12.764433860778809 ], [ "domain", -12.76443862915039 ], [ "▁plasa", -12.764460563659668 ], [ "hong", -12.764585494995117 ], [ "▁Freiheit", -12.764662742614746 ], [ "▁Gateway", -12.764678001403809 ], [ "▁poke", -12.764796257019043 ], [ "▁niedrig", -12.76484203338623 ], [ "▁corrected", -12.764899253845215 ], [ "▁predator", -12.76490306854248 ], [ "QA", -12.76507568359375 ], [ "Physio", -12.765101432800293 ], [ "MAS", -12.765108108520508 ], [ "▁sanctuary", -12.765151023864746 ], [ "▁aferent", -12.76523494720459 ], [ "▁perdre", -12.765268325805664 ], [ "▁recherch", -12.765397071838379 ], [ "ready", -12.76559829711914 ], [ "without", -12.76560115814209 ], [ "▁locuitori", -12.765628814697266 ], [ "▁Memo", -12.765636444091797 ], [ "▁Laden", -12.765646934509277 ], [ "danken", -12.76577377319336 ], [ "▁CNC", -12.765861511230469 ], [ "▁jealous", -12.765881538391113 ], [ "▁Background", -12.765951156616211 ], [ "▁Marx", -12.765999794006348 ], [ "▁Heli", -12.766039848327637 ], [ "▁osteo", -12.766057968139648 ], [ "▁rassembl", -12.766162872314453 ], [ "▁altceva", -12.766226768493652 ], [ "▁beschäftigt", -12.766226768493652 ], [ "▁accru", 
-12.766266822814941 ], [ "üft", -12.766273498535156 ], [ "▁sprout", -12.766288757324219 ], [ "endorf", -12.76647663116455 ], [ "▁specialitate", -12.766483306884766 ], [ "éanmoins", -12.766586303710938 ], [ "▁poign", -12.766663551330566 ], [ "▁mânca", -12.766668319702148 ], [ "▁stretched", -12.766752243041992 ], [ "fensiv", -12.76677131652832 ], [ "▁Auction", -12.76683235168457 ], [ "hints", -12.766944885253906 ], [ "▁typo", -12.766983032226562 ], [ "▁Rare", -12.767003059387207 ], [ "▁interruption", -12.767043113708496 ], [ "▁Mean", -12.76709270477295 ], [ "privileged", -12.767108917236328 ], [ "▁purtat", -12.767129898071289 ], [ "studie", -12.767229080200195 ], [ "offres", -12.767248153686523 ], [ "▁flap", -12.76729679107666 ], [ "▁rhetoric", -12.767304420471191 ], [ "▁snapshot", -12.767325401306152 ], [ "▁Conservative", -12.767367362976074 ], [ "▁taie", -12.767416954040527 ], [ "Game", -12.767499923706055 ], [ "▁naissance", -12.767663955688477 ], [ "Prof", -12.767704963684082 ], [ "qualified", -12.767745971679688 ], [ "▁suppression", -12.767749786376953 ], [ "▁răspunde", -12.767765045166016 ], [ "▁1/3", -12.767803192138672 ], [ "▁lieben", -12.767858505249023 ], [ "ù", -12.767898559570312 ], [ "america", -12.767955780029297 ], [ "▁Mum", -12.768182754516602 ], [ "▁Researchers", -12.76827335357666 ], [ "quip", -12.768308639526367 ], [ "▁fenomen", -12.768383026123047 ], [ "stools", -12.768387794494629 ], [ "▁commodity", -12.768742561340332 ], [ "▁rejuvenat", -12.768745422363281 ], [ "▁ausgezeichnet", -12.76876449584961 ], [ "▁păcate", -12.768784523010254 ], [ "3.6", -12.76882553100586 ], [ "zwei", -12.768904685974121 ], [ "accounted", -12.768982887268066 ], [ "▁Cycle", -12.76900863647461 ], [ "politischen", -12.769031524658203 ], [ "Normally", -12.76904010772705 ], [ "▁transcend", -12.769158363342285 ], [ "▁Classes", -12.769268989562988 ], [ "▁vene", -12.769363403320312 ], [ "protein", -12.76942253112793 ], [ "formulaire", -12.76944351196289 ], [ "▁endurance", -12.769463539123535 ], [ "▁Census", -12.769464492797852 ], [ "▁census", -12.7694673538208 ], [ "▁conțin", -12.76952838897705 ], [ "▁multinational", -12.769563674926758 ], [ "▁consomm", -12.769572257995605 ], [ "▁Porter", -12.769762992858887 ], [ "▁marvel", -12.769777297973633 ], [ "▁probable", -12.769824028015137 ], [ "dependable", -12.770044326782227 ], [ "▁crore", -12.77015495300293 ], [ "▁6:30", -12.770224571228027 ], [ "▁Bradley", -12.77032470703125 ], [ "molecule", -12.770400047302246 ], [ "inclusiv", -12.770516395568848 ], [ "▁privilégi", -12.770543098449707 ], [ "▁cerere", -12.770611763000488 ], [ "ouille", -12.770696640014648 ], [ "▁âgé", -12.770787239074707 ], [ "▁ghid", -12.770801544189453 ], [ "▁Controller", -12.77082347869873 ], [ "▁incredere", -12.770988464355469 ], [ "▁hostel", -12.771015167236328 ], [ "wissenschaft", -12.771121978759766 ], [ "▁cooperate", -12.771183967590332 ], [ "ки", -12.771202087402344 ], [ "▁Küchen", -12.771384239196777 ], [ "▁BIO", -12.771406173706055 ], [ "▁deliveries", -12.771458625793457 ], [ "▁urmări", -12.771553993225098 ], [ "▁überzeugen", -12.771631240844727 ], [ "Roofing", -12.771703720092773 ], [ "▁Adel", -12.771737098693848 ], [ "▁navy", -12.77181339263916 ], [ "▁cider", -12.772101402282715 ], [ "▁dulce", -12.772109985351562 ], [ "▁inspirat", -12.772163391113281 ], [ "allez", -12.772164344787598 ], [ "HH", -12.77221965789795 ], [ "▁Danish", -12.7722749710083 ], [ "CDC", -12.7722806930542 ], [ "▁Milch", -12.772303581237793 ], [ "▁Hockey", -12.772346496582031 ], [ "▁Smooth", 
-12.772347450256348 ], [ "▁FIFA", -12.772361755371094 ], [ "▁Devon", -12.772364616394043 ], [ "chung", -12.772379875183105 ], [ "▁villain", -12.772420883178711 ], [ "▁musée", -12.772441864013672 ], [ "tiennent", -12.772557258605957 ], [ "chou", -12.772732734680176 ], [ "kopf", -12.772809982299805 ], [ "printed", -12.77281379699707 ], [ "▁Depression", -12.773076057434082 ], [ "▁opioid", -12.773082733154297 ], [ "nomie", -12.773098945617676 ], [ "▁footwear", -12.773211479187012 ], [ "▁Cause", -12.773260116577148 ], [ "SEL", -12.773515701293945 ], [ "▁Roller", -12.773523330688477 ], [ "▁einzigartige", -12.773589134216309 ], [ "desea", -12.773597717285156 ], [ "▁nasty", -12.773792266845703 ], [ "formulated", -12.773877143859863 ], [ "breaker", -12.773958206176758 ], [ "▁goodies", -12.773961067199707 ], [ "▁sandy", -12.774189949035645 ], [ "method", -12.77425479888916 ], [ "▁Maple", -12.774308204650879 ], [ "gefragt", -12.774435997009277 ], [ "▁decreasing", -12.774515151977539 ], [ "ceşti", -12.774555206298828 ], [ "▁DUI", -12.774563789367676 ], [ "▁pierdere", -12.774574279785156 ], [ "▁brushes", -12.77466869354248 ], [ "▁Fully", -12.774712562561035 ], [ "filtered", -12.774789810180664 ], [ "ruins", -12.774988174438477 ], [ "Save", -12.775114059448242 ], [ "sweeping", -12.7752046585083 ], [ "PCR", -12.775334358215332 ], [ "▁folded", -12.775337219238281 ], [ "▁urca", -12.775444030761719 ], [ "▁clic", -12.775484085083008 ], [ "▁spécialiste", -12.775614738464355 ], [ "▁durfte", -12.775686264038086 ], [ "tuși", -12.775871276855469 ], [ "▁diligent", -12.77596378326416 ], [ "▁verdict", -12.775972366333008 ], [ "▁chaise", -12.776039123535156 ], [ "▁cleanup", -12.776068687438965 ], [ "▁Guitar", -12.776076316833496 ], [ "▁Dip", -12.776142120361328 ], [ "vru", -12.776260375976562 ], [ "▁cogn", -12.776373863220215 ], [ "something", -12.776529312133789 ], [ "hidr", -12.776535034179688 ], [ "ENG", -12.776607513427734 ], [ "Paul", -12.776679039001465 ], [ "▁reboot", -12.776687622070312 ], [ "savvy", -12.776688575744629 ], [ "▁Macron", -12.776710510253906 ], [ "▁Kino", -12.77682876586914 ], [ "232", -12.776832580566406 ], [ "▁gravit", -12.776861190795898 ], [ "ANC", -12.776883125305176 ], [ "▁petrecut", -12.776944160461426 ], [ "▁signage", -12.776959419250488 ], [ "odia", -12.776987075805664 ], [ "▁GRA", -12.77712631225586 ], [ "▁alegeril", -12.777129173278809 ], [ "leger", -12.77717399597168 ], [ "▁medicamente", -12.777174949645996 ], [ "pentru", -12.777249336242676 ], [ "▁collectif", -12.777251243591309 ], [ "▁Sohn", -12.777298927307129 ], [ "205", -12.777313232421875 ], [ "▁Reach", -12.77733039855957 ], [ "RAM", -12.777400970458984 ], [ "3.4", -12.777405738830566 ], [ "▁bleach", -12.777409553527832 ], [ "▁diligence", -12.777414321899414 ], [ "▁MORE", -12.777440071105957 ], [ "▁Critical", -12.777471542358398 ], [ "▁singură", -12.77767276763916 ], [ "▁adversar", -12.777791023254395 ], [ "▁Buzz", -12.7778902053833 ], [ "▁demeure", -12.778063774108887 ], [ "▁nephew", -12.778141021728516 ], [ "▁Boom", -12.77817440032959 ], [ "▁shining", -12.77819538116455 ], [ "▁sponge", -12.778206825256348 ], [ "liest", -12.77841854095459 ], [ "rseits", -12.778690338134766 ], [ "▁capita", -12.778823852539062 ], [ "esthesia", -12.778867721557617 ], [ "500,000", -12.77895736694336 ], [ "▁Pressure", -12.77898120880127 ], [ "ifikation", -12.779021263122559 ], [ "▁acceleration", -12.779181480407715 ], [ "▁Pfarr", -12.779282569885254 ], [ "▁imobil", -12.779304504394531 ], [ "▁pericol", -12.779326438903809 ], [ "▁flock", 
-12.779454231262207 ], [ "▁Scholar", -12.77962875366211 ], [ "▁Fusion", -12.779630661010742 ], [ "▁revolve", -12.779637336730957 ], [ "Plugin", -12.779664993286133 ], [ "▁Ruf", -12.779691696166992 ], [ "▁tehnici", -12.780024528503418 ], [ "voice", -12.78005313873291 ], [ "▁anomal", -12.780203819274902 ], [ "▁gefallen", -12.780252456665039 ], [ "▁Wyoming", -12.780322074890137 ], [ "▁9:00", -12.780354499816895 ], [ "packed", -12.780461311340332 ], [ "▁Zimbabwe", -12.780686378479004 ], [ "▁glücklich", -12.780766487121582 ], [ "ethanol", -12.78077220916748 ], [ "▁effektiv", -12.780936241149902 ], [ "▁saptamani", -12.781049728393555 ], [ "▁umfasst", -12.781052589416504 ], [ "▁Werbung", -12.781103134155273 ], [ "▁undermine", -12.781164169311523 ], [ "▁Lego", -12.781322479248047 ], [ "▁Rac", -12.781323432922363 ], [ "educating", -12.781441688537598 ], [ "leiten", -12.781451225280762 ], [ "derma", -12.781518936157227 ], [ "hängen", -12.781597137451172 ], [ "Lumin", -12.781846046447754 ], [ "▁PNL", -12.781913757324219 ], [ "▁volcano", -12.782064437866211 ], [ "▁Anfrage", -12.782066345214844 ], [ "▁resp", -12.782124519348145 ], [ "leigh", -12.78217601776123 ], [ "▁addict", -12.782176971435547 ], [ "WORK", -12.782312393188477 ], [ "▁FY", -12.782322883605957 ], [ "▁maneuver", -12.782513618469238 ], [ "flächen", -12.782525062561035 ], [ "zweck", -12.782527923583984 ], [ "tolerant", -12.782609939575195 ], [ "Davidson", -12.78272533416748 ], [ "▁meteor", -12.782849311828613 ], [ "▁Stephanie", -12.78291130065918 ], [ "▁plafon", -12.783126831054688 ], [ "technischen", -12.78316879272461 ], [ "unused", -12.783193588256836 ], [ "▁voulai", -12.783228874206543 ], [ "▁fehlt", -12.783447265625 ], [ "möglichen", -12.783955574035645 ], [ "▁Twenty", -12.783968925476074 ], [ "composing", -12.783979415893555 ], [ "▁rebate", -12.78400707244873 ], [ "Italie", -12.784036636352539 ], [ "▁goodbye", -12.784058570861816 ], [ "wild", -12.784061431884766 ], [ "▁lancé", -12.784077644348145 ], [ "▁wunderschöne", -12.784083366394043 ], [ "▁Frontier", -12.784139633178711 ], [ "▁murit", -12.784313201904297 ], [ "▁scump", -12.78464412689209 ], [ "OVER", -12.784682273864746 ], [ "▁meme", -12.784709930419922 ], [ "Super", -12.784733772277832 ], [ "▁Crack", -12.784849166870117 ], [ "rennen", -12.784907341003418 ], [ "▁interessiert", -12.784941673278809 ], [ "▁relaţi", -12.784942626953125 ], [ "▁factories", -12.784975051879883 ], [ "▁[...]", -12.785066604614258 ], [ "▁vizite", -12.785075187683105 ], [ "▁erfolgen", -12.785199165344238 ], [ "▁Hosting", -12.785244941711426 ], [ "▁localitate", -12.78528118133545 ], [ "▁chasse", -12.785415649414062 ], [ "▁Meadow", -12.785465240478516 ], [ "▁expansive", -12.785513877868652 ], [ "hov", -12.785874366760254 ], [ "Phil", -12.785978317260742 ], [ "illian", -12.786107063293457 ], [ "▁manipulate", -12.786107063293457 ], [ "informationen", -12.786130905151367 ], [ "▁profesionist", -12.786162376403809 ], [ "risen", -12.786252975463867 ], [ "frem", -12.786300659179688 ], [ "Act", -12.78640079498291 ], [ "supervised", -12.786491394042969 ], [ "▁capul", -12.786506652832031 ], [ "▁Craiova", -12.786528587341309 ], [ "▁victoire", -12.786528587341309 ], [ "▁guitarist", -12.786680221557617 ], [ "▁identific", -12.786684036254883 ], [ "democrat", -12.786864280700684 ], [ "Authentic", -12.786894798278809 ], [ "▁Autumn", -12.786894798278809 ], [ "▁bodi", -12.787014961242676 ], [ "April", -12.787044525146484 ], [ "▁Burger", -12.787049293518066 ], [ "▁BEST", -12.787490844726562 ], [ "▁torrent", -12.78749942779541 
], [ "UV", -12.787567138671875 ], [ "▁renal", -12.787676811218262 ], [ "founded", -12.787693977355957 ], [ "203", -12.787956237792969 ], [ "▁Flooring", -12.78799057006836 ], [ "▁kilogram", -12.787994384765625 ], [ "▁garantiert", -12.788139343261719 ], [ "▁fulfil", -12.788204193115234 ], [ "303", -12.788330078125 ], [ "▁schafft", -12.788363456726074 ], [ "▁butterfly", -12.788365364074707 ], [ "▁Stuart", -12.788382530212402 ], [ "▁Versuch", -12.788392066955566 ], [ "▁liking", -12.788412094116211 ], [ "▁chercher", -12.788508415222168 ], [ "▁wrapping", -12.788527488708496 ], [ "schrieb", -12.788652420043945 ], [ "▁abuz", -12.788718223571777 ], [ "▁maîtrise", -12.788772583007812 ], [ "EQ", -12.788887977600098 ], [ "▁Erinnerung", -12.789095878601074 ], [ "▁bridal", -12.78909969329834 ], [ "Rock", -12.789118766784668 ], [ "▁copied", -12.789193153381348 ], [ "Met", -12.789206504821777 ], [ "▁incep", -12.789233207702637 ], [ "▁sinus", -12.789336204528809 ], [ "▁Felix", -12.789831161499023 ], [ "▁Deluxe", -12.789837837219238 ], [ "▁GPU", -12.789848327636719 ], [ "Sie", -12.790164947509766 ], [ "lowering", -12.790262222290039 ], [ "▁Trotz", -12.790282249450684 ], [ "333", -12.790417671203613 ], [ "withstand", -12.79055118560791 ], [ "▁Aufenthalt", -12.790566444396973 ], [ "▁unhealthy", -12.790567398071289 ], [ "▁urbain", -12.790573120117188 ], [ "▁LOL", -12.790702819824219 ], [ "▁Ballet", -12.79074478149414 ], [ "▁Decoration", -12.79083251953125 ], [ "weist", -12.790839195251465 ], [ "▁Residence", -12.790932655334473 ], [ "▁Leeds", -12.791055679321289 ], [ "▁Genau", -12.791084289550781 ], [ "Imagin", -12.791136741638184 ], [ "▁suspicion", -12.791300773620605 ], [ "▁pêche", -12.791301727294922 ], [ "▁Soccer", -12.791306495666504 ], [ "▁protectie", -12.791553497314453 ], [ "ATS", -12.791796684265137 ], [ "stocked", -12.791838645935059 ], [ "▁gymnas", -12.79184627532959 ], [ "ASP", -12.792027473449707 ], [ "▁Independence", -12.792037010192871 ], [ "▁Wizard", -12.792037963867188 ], [ "▁nitrogen", -12.79204273223877 ], [ "amerikanische", -12.7920503616333 ], [ "▁Indianapolis", -12.79205322265625 ], [ "catches", -12.792131423950195 ], [ "stria", -12.792275428771973 ], [ "schätze", -12.79235553741455 ], [ "▁Räume", -12.792387962341309 ], [ "▁Interesting", -12.792403221130371 ], [ "bürger", -12.79240608215332 ], [ "sweet", -12.792410850524902 ], [ "Identify", -12.792632102966309 ], [ "EEN", -12.792651176452637 ], [ "▁£3", -12.792654991149902 ], [ "interacting", -12.7926664352417 ], [ "NYSE", -12.792762756347656 ], [ "▁Dynamics", -12.79277515411377 ], [ "▁modificări", -12.792777061462402 ], [ "▁Kumar", -12.792936325073242 ], [ "chette", -12.79313850402832 ], [ "▁presiune", -12.79316234588623 ], [ "arni", -12.793164253234863 ], [ "▁vielfältig", -12.793221473693848 ], [ "KC", -12.793259620666504 ], [ "▁Cuisine", -12.793513298034668 ], [ "▁australia", -12.793885231018066 ], [ "▁încet", -12.794026374816895 ], [ "▁caracteristic", -12.794257164001465 ], [ "▁cookbook", -12.794501304626465 ], [ "▁douleur", -12.79453182220459 ], [ "AVI", -12.794593811035156 ], [ "artikel", -12.794740676879883 ], [ "feta", -12.79493522644043 ], [ "▁fréquent", -12.794987678527832 ], [ "▁Prophet", -12.795051574707031 ], [ "▁dépense", -12.795202255249023 ], [ "▁Smile", -12.795235633850098 ], [ "▁lawmakers", -12.79525375366211 ], [ "▁Kollegen", -12.795391082763672 ], [ "▁Pir", -12.79555606842041 ], [ "serez", -12.79561710357666 ], [ "▁consumator", -12.795656204223633 ], [ "▁playlist", -12.795730590820312 ], [ "▁envisage", 
-12.795733451843262 ], [ "swept", -12.795780181884766 ], [ "▁Grim", -12.795825004577637 ], [ "▁widow", -12.795836448669434 ], [ "authorised", -12.795886039733887 ], [ "▁(...)", -12.796035766601562 ], [ "▁photographic", -12.796060562133789 ], [ "▁libertate", -12.796173095703125 ], [ "▁principalement", -12.796201705932617 ], [ "umming", -12.796260833740234 ], [ "▁Montréal", -12.796465873718262 ], [ "▁compilation", -12.796468734741211 ], [ "▁erlaubt", -12.79647159576416 ], [ "▁biblical", -12.796518325805664 ], [ "volume", -12.796561241149902 ], [ "5-7", -12.796809196472168 ], [ "▁Versch", -12.79689884185791 ], [ "▁Shark", -12.796957015991211 ], [ "ologne", -12.796969413757324 ], [ "4.4", -12.797086715698242 ], [ "decken", -12.797112464904785 ], [ "▁frequencies", -12.797205924987793 ], [ "▁inferior", -12.79720687866211 ], [ "visible", -12.797321319580078 ], [ "▁educator", -12.797394752502441 ], [ "▁soziale", -12.797420501708984 ], [ "▁billet", -12.797523498535156 ], [ "folosirea", -12.797574996948242 ], [ "▁aufgenommen", -12.797590255737305 ], [ "▁Thread", -12.797649383544922 ], [ "registering", -12.797694206237793 ], [ "▁Loop", -12.797747611999512 ], [ "innovation", -12.79783821105957 ], [ "▁elimination", -12.797857284545898 ], [ "136", -12.797883987426758 ], [ "▁fluctu", -12.797892570495605 ], [ "▁Mercury", -12.79794692993164 ], [ "▁bouche", -12.797955513000488 ], [ "▁hurdle", -12.7979736328125 ], [ "▁Bennett", -12.798040390014648 ], [ "STI", -12.79818344116211 ], [ "▁théâtre", -12.798316955566406 ], [ "▁confortable", -12.798359870910645 ], [ "▁Automobil", -12.79838752746582 ], [ "▁Donna", -12.798399925231934 ], [ "▁foyer", -12.79841136932373 ], [ "▁hollow", -12.798465728759766 ], [ "▁règlement", -12.79861068725586 ], [ "effi", -12.798616409301758 ], [ "▁sediment", -12.79869270324707 ], [ "▁Mä", -12.798774719238281 ], [ "▁faint", -12.798833847045898 ], [ "feti", -12.79890251159668 ], [ "▁Concord", -12.798959732055664 ], [ "▁Ladies", -12.798990249633789 ], [ "▁pregatit", -12.799052238464355 ], [ "▁Ensemble", -12.79905891418457 ], [ "▁Ingredient", -12.79905891418457 ], [ "▁Respond", -12.79914379119873 ], [ "▁impaired", -12.799356460571289 ], [ "▁Feedback", -12.799430847167969 ], [ "▁ultrasound", -12.799461364746094 ], [ "▁Guvernului", -12.799617767333984 ], [ "▁Unterricht", -12.799654006958008 ], [ "▁prosecut", -12.799662590026855 ], [ "spend", -12.799732208251953 ], [ "▁capitol", -12.799800872802734 ], [ "USD", -12.799822807312012 ], [ "observing", -12.799947738647461 ], [ "▁effortlessly", -12.800045013427734 ], [ "▁Setting", -12.80010986328125 ], [ "▁spontaneous", -12.80020809173584 ], [ "▁LEGO", -12.800238609313965 ], [ "initiative", -12.800299644470215 ], [ "▁Sak", -12.800299644470215 ], [ "Interestingly", -12.800326347351074 ], [ "▁Yale", -12.800352096557617 ], [ "▁größer", -12.80038070678711 ], [ "RIC", -12.800406455993652 ], [ "▁distracted", -12.800436973571777 ], [ "drafted", -12.800484657287598 ], [ "▁Brenda", -12.800522804260254 ], [ "monopol", -12.800551414489746 ], [ "städt", -12.800580024719238 ], [ "▁altar", -12.80058765411377 ], [ "▁Hannover", -12.800596237182617 ], [ "▁Spiritual", -12.800702095031738 ], [ "▁thriller", -12.800747871398926 ], [ "▁Schneider", -12.800760269165039 ], [ "▁accumulate", -12.800817489624023 ], [ "▁mediului", -12.800822257995605 ], [ "▁Mathematics", -12.800914764404297 ], [ "▁paradox", -12.800986289978027 ], [ "▁Sham", -12.801230430603027 ], [ "▁SITE", -12.801375389099121 ], [ "▁echipei", -12.801508903503418 ], [ "▁staircase", -12.801660537719727 ], [ 
"▁întrebări", -12.801705360412598 ], [ "Commerce", -12.802020072937012 ], [ "▁selfie", -12.802353858947754 ], [ "▁Pocket", -12.802404403686523 ], [ "▁niemand", -12.80263614654541 ], [ "Tool", -12.802678108215332 ], [ "igma", -12.802695274353027 ], [ "utilisant", -12.802915573120117 ], [ "▁negatively", -12.80295181274414 ], [ "Secondly", -12.802955627441406 ], [ "▁ROI", -12.8030366897583 ], [ "Arch", -12.803121566772461 ], [ "▁continuity", -12.80318546295166 ], [ "▁Prayer", -12.803235054016113 ], [ "inverse", -12.803241729736328 ], [ "▁Himmel", -12.803336143493652 ], [ "prinz", -12.803478240966797 ], [ "wichtigen", -12.803496360778809 ], [ "étage", -12.803522109985352 ], [ "summe", -12.8036527633667 ], [ "▁Zeitung", -12.80366039276123 ], [ "▁realization", -12.803897857666016 ], [ "▁influent", -12.804291725158691 ], [ "▁Valid", -12.804357528686523 ], [ "▁publicity", -12.804439544677734 ], [ "▁vertreten", -12.804447174072266 ], [ "▁Shoes", -12.804609298706055 ], [ "▁Diabetes", -12.80463695526123 ], [ "▁anticipation", -12.804670333862305 ], [ "▁Blank", -12.8047456741333 ], [ "asked", -12.804899215698242 ], [ "Power", -12.804938316345215 ], [ "arrelage", -12.805140495300293 ], [ "▁appraisal", -12.80538272857666 ], [ "▁harassment", -12.805542945861816 ], [ "Anzeige", -12.805682182312012 ], [ "liners", -12.80584716796875 ], [ "Firstly", -12.805851936340332 ], [ "transferring", -12.805951118469238 ], [ "▁Diane", -12.806012153625488 ], [ "▁1/2\"", -12.80606746673584 ], [ "▁adrenal", -12.806131362915039 ], [ "▁Prague", -12.806208610534668 ], [ "insertion", -12.80635929107666 ], [ "▁Fahrer", -12.806465148925781 ], [ "▁divin", -12.806585311889648 ], [ "▁douche", -12.80673885345459 ], [ "▁meticulous", -12.806879043579102 ], [ "▁IEEE", -12.806981086730957 ], [ "▁Rabatt", -12.807259559631348 ], [ "Runner", -12.807342529296875 ], [ "▁Leder", -12.807429313659668 ], [ "project", -12.80745792388916 ], [ "▁Split", -12.807562828063965 ], [ "Gold", -12.807600021362305 ], [ "5.00", -12.807629585266113 ], [ "iola", -12.807655334472656 ], [ "standardized", -12.807890892028809 ], [ "ordination", -12.807984352111816 ], [ "▁Egal", -12.808158874511719 ], [ "▁ruhig", -12.808241844177246 ], [ "▁judiciar", -12.80837345123291 ], [ "▁Nowadays", -12.808374404907227 ], [ "▁whistle", -12.808374404907227 ], [ "▁superhero", -12.808379173278809 ], [ "▁PowerPoint", -12.808408737182617 ], [ "flop", -12.808420181274414 ], [ "olph", -12.808460235595703 ], [ "▁pallet", -12.808916091918945 ], [ "posons", -12.809005737304688 ], [ "▁Listing", -12.809032440185547 ], [ "Tag", -12.809075355529785 ], [ "introductory", -12.809122085571289 ], [ "▁Profil", -12.809123992919922 ], [ "symmetric", -12.809126853942871 ], [ "▁aisle", -12.809138298034668 ], [ "▁ajouté", -12.809147834777832 ], [ "opathy", -12.809149742126465 ], [ "prezentate", -12.809155464172363 ], [ "▁hurry", -12.809165000915527 ], [ "Auth", -12.809310913085938 ], [ "▁Homepage", -12.809435844421387 ], [ "ashes", -12.809489250183105 ], [ "▁inklusive", -12.809496879577637 ], [ "populated", -12.809502601623535 ], [ "▁nein", -12.809554100036621 ], [ "▁syndicat", -12.809690475463867 ], [ "▁développé", -12.809842109680176 ], [ "▁Domestic", -12.809877395629883 ], [ "essay", -12.809967994689941 ], [ "Atelier", -12.809980392456055 ], [ "▁proceeding", -12.810006141662598 ], [ "▁SAS", -12.810038566589355 ], [ "task", -12.810063362121582 ], [ "▁blackjack", -12.810114860534668 ], [ "Key", -12.810186386108398 ], [ "thérapie", -12.810247421264648 ], [ "▁Cohen", -12.810397148132324 ], [ "Direct", 
-12.810510635375977 ], [ "▁Estimat", -12.810517311096191 ], [ "élève", -12.810616493225098 ], [ "cind", -12.810640335083008 ], [ "▁prezenț", -12.810701370239258 ], [ "▁notorious", -12.810725212097168 ], [ "climbed", -12.810816764831543 ], [ "▁flexibil", -12.810830116271973 ], [ "▁entlang", -12.810855865478516 ], [ "longed", -12.81103515625 ], [ "▁elbow", -12.811078071594238 ], [ "BH", -12.811296463012695 ], [ "▁Radu", -12.811376571655273 ], [ "▁lonely", -12.811378479003906 ], [ "ALA", -12.811405181884766 ], [ "Variante", -12.811639785766602 ], [ "▁Influen", -12.81169319152832 ], [ "▁Budapest", -12.811747550964355 ], [ "▁Gemüse", -12.811747550964355 ], [ "▁continental", -12.811750411987305 ], [ "ippo", -12.811771392822266 ], [ "▁Affordable", -12.81212329864502 ], [ "▁niece", -12.812187194824219 ], [ "oscopic", -12.812190055847168 ], [ "▁Grid", -12.81222152709961 ], [ "sliced", -12.812270164489746 ], [ "▁voici", -12.812294006347656 ], [ "aveam", -12.812471389770508 ], [ "▁Lars", -12.812612533569336 ], [ "APA", -12.812657356262207 ], [ "▁particulière", -12.812858581542969 ], [ "sorb", -12.8128662109375 ], [ "▁1955", -12.812887191772461 ], [ "▁solutii", -12.812942504882812 ], [ "loch", -12.812960624694824 ], [ "▁summon", -12.813212394714355 ], [ "wurf", -12.813271522521973 ], [ "▁protecți", -12.813288688659668 ], [ "2001", -12.813499450683594 ], [ "▁sophomore", -12.813627243041992 ], [ "▁Schwerpunkt", -12.813628196716309 ], [ "▁diplomat", -12.813687324523926 ], [ "▁artistique", -12.813726425170898 ], [ "▁accueille", -12.813739776611328 ], [ "Disp", -12.813746452331543 ], [ "inherited", -12.813764572143555 ], [ "▁COMP", -12.813889503479004 ], [ "▁envoyé", -12.814046859741211 ], [ "▁tuning", -12.814056396484375 ], [ "▁entspricht", -12.814062118530273 ], [ "▁exerc", -12.81406307220459 ], [ "▁accessoires", -12.8140869140625 ], [ "▁Automat", -12.814348220825195 ], [ "importance", -12.814408302307129 ], [ "▁travellers", -12.814432144165039 ], [ "seiten", -12.814474105834961 ], [ "▁slider", -12.814481735229492 ], [ "effect", -12.814591407775879 ], [ "▁siding", -12.814669609069824 ], [ "▁Crit", -12.814780235290527 ], [ "▁sportif", -12.814827919006348 ], [ "▁Accessories", -12.81513500213623 ], [ "▁Anteil", -12.815184593200684 ], [ "▁limbi", -12.81519603729248 ], [ "▁vendre", -12.815269470214844 ], [ "borg", -12.815435409545898 ], [ "▁Deposit", -12.815508842468262 ], [ "▁Hö", -12.815717697143555 ], [ "employé", -12.8157320022583 ], [ "▁Bangalore", -12.815887451171875 ], [ "▁itinerary", -12.815888404846191 ], [ "▁Deliver", -12.816008567810059 ], [ "dik", -12.816024780273438 ], [ "▁advent", -12.816100120544434 ], [ "▁Turk", -12.81614875793457 ], [ "▁Nico", -12.816154479980469 ], [ "organizarea", -12.816161155700684 ], [ "▁remport", -12.816166877746582 ], [ "▁tribunal", -12.816266059875488 ], [ "▁Rusia", -12.8162841796875 ], [ "glazed", -12.816339492797852 ], [ "▁destiné", -12.816502571105957 ], [ "304", -12.816533088684082 ], [ "album", -12.816650390625 ], [ "▁junction", -12.81665325164795 ], [ "▁Fleet", -12.816664695739746 ], [ "venant", -12.81667423248291 ], [ "▁buddy", -12.816694259643555 ], [ "▁neglected", -12.816694259643555 ], [ "▁Mask", -12.816783905029297 ], [ "▁testament", -12.816844940185547 ], [ "▁Basil", -12.81690788269043 ], [ "masă", -12.816922187805176 ], [ "▁racist", -12.81692886352539 ], [ "640", -12.816990852355957 ], [ "▁Standing", -12.817028045654297 ], [ "▁MUST", -12.817266464233398 ], [ "situation", -12.817327499389648 ], [ "▁informiert", -12.817337036132812 ], [ "ABA", 
-12.817353248596191 ], [ "▁Timothy", -12.817397117614746 ], [ "▁generosity", -12.817397117614746 ], [ "▁erscheint", -12.817402839660645 ], [ "▁verarbeitet", -12.81740665435791 ], [ "▁burial", -12.817444801330566 ], [ "▁limestone", -12.817458152770996 ], [ "▁1953", -12.817480087280273 ], [ "▁Lucr", -12.817506790161133 ], [ "small", -12.817633628845215 ], [ "aveau", -12.81763744354248 ], [ "versiune", -12.81773567199707 ], [ "▁inkl", -12.81775951385498 ], [ "▁Minneapolis", -12.81777572631836 ], [ "Spiel", -12.81781005859375 ], [ "▁encode", -12.817895889282227 ], [ "▁beforehand", -12.818021774291992 ], [ "▁Vital", -12.818086624145508 ], [ "▁socialist", -12.818228721618652 ], [ "inho", -12.81824779510498 ], [ "▁chapel", -12.81825065612793 ], [ "▁Monitoring", -12.81838607788086 ], [ "▁quotidienne", -12.818404197692871 ], [ "cloud", -12.818506240844727 ], [ "▁desfăşur", -12.818531036376953 ], [ "▁1952", -12.818638801574707 ], [ "▁Rü", -12.818690299987793 ], [ "▁Sigma", -12.818804740905762 ], [ "134", -12.818835258483887 ], [ "Sullivan", -12.818909645080566 ], [ "▁Bevölkerung", -12.818909645080566 ], [ "▁sufficiently", -12.818953514099121 ], [ "Check", -12.818992614746094 ], [ "rnie", -12.8190336227417 ], [ "contamin", -12.819132804870605 ], [ "▁gewonnen", -12.81928825378418 ], [ "▁bugetul", -12.819376945495605 ], [ "▁mustard", -12.819414138793945 ], [ "132", -12.819478988647461 ], [ "0.9", -12.819535255432129 ], [ "▁tratat", -12.81957721710205 ], [ "▁dilemma", -12.819666862487793 ], [ "▁versatility", -12.819666862487793 ], [ "▁clutter", -12.819670677185059 ], [ "▁Musk", -12.81973934173584 ], [ "▁Beide", -12.819750785827637 ], [ "hurst", -12.819758415222168 ], [ "atsu", -12.819767951965332 ], [ "absence", -12.819784164428711 ], [ "rebounds", -12.819881439208984 ], [ "6.1", -12.820029258728027 ], [ "Dia", -12.820046424865723 ], [ "▁siguranță", -12.820060729980469 ], [ "▁Blade", -12.820072174072266 ], [ "▁disrupt", -12.820074081420898 ], [ "▁visiteurs", -12.820169448852539 ], [ "tested", -12.820282936096191 ], [ "▁Lup", -12.820353507995605 ], [ "▁Rouge", -12.820371627807617 ], [ "▁asbestos", -12.82042407989502 ], [ "▁moisturize", -12.820427894592285 ], [ "▁acknowledg", -12.82045841217041 ], [ "▁procent", -12.820467948913574 ], [ "▁swear", -12.82050895690918 ], [ "▁911", -12.820647239685059 ], [ "präsent", -12.820724487304688 ], [ "▁cohort", -12.82072639465332 ], [ "▁intimid", -12.820830345153809 ], [ "JS", -12.820849418640137 ], [ "îm", -12.82096004486084 ], [ "▁Kunststoff", -12.820963859558105 ], [ "rison", -12.820972442626953 ], [ "▁praf", -12.82097339630127 ], [ "▁convient", -12.821019172668457 ], [ "▁partenaire", -12.821088790893555 ], [ "▁Verantwortlich", -12.821182250976562 ], [ "▁semiconductor", -12.821182250976562 ], [ "▁kürz", -12.821187019348145 ], [ "▁Bottom", -12.821187973022461 ], [ "▁tratamentul", -12.82127571105957 ], [ "Source", -12.821331024169922 ], [ "authored", -12.82172679901123 ], [ "robo", -12.821867942810059 ], [ "▁turf", -12.82194709777832 ], [ "▁liebe", -12.821971893310547 ], [ "▁Fotografi", -12.821995735168457 ], [ "Big", -12.822064399719238 ], [ "▁fireworks", -12.822081565856934 ], [ "▁presă", -12.822135925292969 ], [ "▁conceal", -12.822269439697266 ], [ "▁originated", -12.82227897644043 ], [ "▁biciclet", -12.822319984436035 ], [ "acești", -12.822577476501465 ], [ "▁mortar", -12.822585105895996 ], [ "▁Wunder", -12.822626113891602 ], [ "ionist", -12.822696685791016 ], [ "KM", -12.822871208190918 ], [ "▁Marion", -12.822918891906738 ], [ "produkte", -12.822933197021484 ], [ 
"▁Sprint", -12.822999000549316 ], [ "▁Nachde", -12.8230619430542 ], [ "▁verfüge", -12.823100090026855 ], [ "Marea", -12.823177337646484 ], [ "▁compressor", -12.823253631591797 ], [ "Arm", -12.823290824890137 ], [ "Auf", -12.823311805725098 ], [ "▁Polyester", -12.823461532592773 ], [ "▁Sheffield", -12.823461532592773 ], [ "illiard", -12.823494911193848 ], [ "▁misleading", -12.82353401184082 ], [ "multi", -12.823749542236328 ], [ "ripped", -12.82381820678711 ], [ "▁Cosmetic", -12.82383918762207 ], [ "▁Regal", -12.823890686035156 ], [ "▁authenticity", -12.82414436340332 ], [ "▁customizable", -12.824219703674316 ], [ "▁bathtub", -12.824275016784668 ], [ "▁Average", -12.824292182922363 ], [ "▁Muster", -12.824522018432617 ], [ "290", -12.824529647827148 ], [ "▁Ersatz", -12.824570655822754 ], [ "▁Might", -12.824588775634766 ], [ "published", -12.82461929321289 ], [ "▁Interpret", -12.824640274047852 ], [ "▁încep", -12.82480239868164 ], [ "▁proto", -12.824851036071777 ], [ "▁disque", -12.824889183044434 ], [ "▁Palestine", -12.824980735778809 ], [ "Over", -12.824981689453125 ], [ "▁verbessert", -12.824983596801758 ], [ "▁liefern", -12.825017929077148 ], [ "▁Handlung", -12.825095176696777 ], [ "▁Handels", -12.825150489807129 ], [ "▁eater", -12.825201988220215 ], [ "▁$40", -12.825251579284668 ], [ "illard", -12.825334548950195 ], [ "▁apariti", -12.825413703918457 ], [ "▁gag", -12.825422286987305 ], [ "▁chimic", -12.825541496276855 ], [ "▁Guru", -12.825594902038574 ], [ "▁Toilet", -12.82571792602539 ], [ "▁Tochter", -12.825748443603516 ], [ "▁Aurora", -12.82579231262207 ], [ "contro", -12.825922966003418 ], [ "▁GOP", -12.825995445251465 ], [ "Provence", -12.826130867004395 ], [ "▁Frieden", -12.82614803314209 ], [ "ăci", -12.826216697692871 ], [ "portée", -12.826268196105957 ], [ "▁upright", -12.826300621032715 ], [ "▁Physician", -12.82650375366211 ], [ "▁juridique", -12.82650375366211 ], [ "▁territorial", -12.82650375366211 ], [ "▁kindergarten", -12.826505661010742 ], [ "aéroport", -12.826510429382324 ], [ "▁whisper", -12.826513290405273 ], [ "▁capacities", -12.826562881469727 ], [ "dichte", -12.826641082763672 ], [ "▁Grenzen", -12.826822280883789 ], [ "▁Riv", -12.82710075378418 ], [ "épreuve", -12.827266693115234 ], [ "▁Scheme", -12.827290534973145 ], [ "mesures", -12.827330589294434 ], [ "▁Einfluss", -12.827333450317383 ], [ "appui", -12.827713966369629 ], [ "▁apuc", -12.827827453613281 ], [ "▁radiat", -12.82794189453125 ], [ "▁allergy", -12.828035354614258 ], [ "▁spear", -12.828038215637207 ], [ "▁Luxembourg", -12.828086853027344 ], [ "▁Registered", -12.828115463256836 ], [ "▁Shape", -12.828198432922363 ], [ "genie", -12.828328132629395 ], [ "nsonsten", -12.828385353088379 ], [ "▁Symposium", -12.828412055969238 ], [ "forderung", -12.828474998474121 ], [ "▁personalizat", -12.82866096496582 ], [ "▁ştiu", -12.82875919342041 ], [ "blatt", -12.828804016113281 ], [ "▁geometry", -12.828807830810547 ], [ "▁8:30", -12.828831672668457 ], [ "▁Fahrrad", -12.828861236572266 ], [ "After", -12.828927040100098 ], [ "▁ventilat", -12.829072952270508 ], [ "▁nylon", -12.829190254211426 ], [ "▁verkauft", -12.829304695129395 ], [ "öß", -12.829345703125 ], [ "▁Kath", -12.829523086547852 ], [ "▁Nuclear", -12.829558372497559 ], [ "▁Verizon", -12.829560279846191 ], [ "▁spokesperson", -12.829560279846191 ], [ "▁vietii", -12.829560279846191 ], [ "▁prescri", -12.829629898071289 ], [ "ру", -12.829666137695312 ], [ "6.2", -12.829801559448242 ], [ "▁spațiu", -12.830018997192383 ], [ "▁solvent", -12.83006763458252 ], [ ",000,000", 
-12.830142974853516 ], [ "reuen", -12.830185890197754 ], [ "plast", -12.830245018005371 ], [ "▁Activities", -12.830334663391113 ], [ "▁domni", -12.83056926727295 ], [ "▁trophy", -12.830572128295898 ], [ "▁saddle", -12.830657958984375 ], [ "▁renovat", -12.830708503723145 ], [ "▁bumper", -12.830717086791992 ], [ "▁penny", -12.830741882324219 ], [ "omato", -12.830743789672852 ], [ "AQ", -12.83083438873291 ], [ "kunst", -12.830843925476074 ], [ "hydrat", -12.830860137939453 ], [ "minder", -12.830931663513184 ], [ "trecerea", -12.830949783325195 ], [ "brush", -12.831185340881348 ], [ "TEC", -12.83121395111084 ], [ "Please", -12.831253051757812 ], [ "hydrated", -12.831483840942383 ], [ "ICAL", -12.831636428833008 ], [ "trauen", -12.831639289855957 ], [ "9,000", -12.83175277709961 ], [ "▁2030", -12.831830024719238 ], [ "▁Chennai", -12.831854820251465 ], [ "▁empirical", -12.831854820251465 ], [ "▁Subscribe", -12.83206844329834 ], [ "▁vorgestellt", -12.832120895385742 ], [ "▁Springfield", -12.832159996032715 ], [ "▁continuu", -12.832311630249023 ], [ "208", -12.832351684570312 ], [ "▁Bearing", -12.83240795135498 ], [ "2003", -12.832572937011719 ], [ "cheta", -12.832608222961426 ], [ "▁empathy", -12.832623481750488 ], [ "▁Alert", -12.832817077636719 ], [ "▁recreate", -12.832879066467285 ], [ "PJ", -12.833159446716309 ], [ "Name", -12.83323860168457 ], [ "▁Mouse", -12.833405494689941 ], [ "▁disturbing", -12.833443641662598 ], [ "▁leichter", -12.83344841003418 ], [ "▁cruel", -12.833507537841797 ], [ "▁detective", -12.833531379699707 ], [ "▁reimbursement", -12.833626747131348 ], [ "▁Gemeinschaft", -12.833772659301758 ], [ "▁adolescents", -12.833772659301758 ], [ "▁Reality", -12.833954811096191 ], [ "▁Stockholm", -12.83415699005127 ], [ "▁Gründen", -12.834304809570312 ], [ "▁Reflect", -12.83432388305664 ], [ "▁Palmer", -12.834336280822754 ], [ "▁treac", -12.8343505859375 ], [ "▁tentative", -12.834497451782227 ], [ "▁surrender", -12.834677696228027 ], [ "▁broadly", -12.834734916687012 ], [ "▁județ", -12.834814071655273 ], [ "▁Thu", -12.834845542907715 ], [ "wärts", -12.834961891174316 ], [ "▁crește", -12.835074424743652 ], [ "▁déplacement", -12.835208892822266 ], [ "blanc", -12.835268020629883 ], [ "▁£5", -12.835308074951172 ], [ "▁confidentiality", -12.835320472717285 ], [ "veraging", -12.835444450378418 ], [ "unité", -12.835609436035156 ], [ "clar", -12.83564567565918 ], [ "rigg", -12.835693359375 ], [ "honneur", -12.835694313049316 ], [ "▁adventurous", -12.835694313049316 ], [ "▁Nutzen", -12.835758209228516 ], [ "▁Kabel", -12.835800170898438 ], [ "empowering", -12.836040496826172 ], [ "verhalten", -12.836042404174805 ], [ "▁prevail", -12.8361234664917 ], [ "mashed", -12.836138725280762 ], [ "▁1947", -12.83616828918457 ], [ "function", -12.836292266845703 ], [ "niveaux", -12.83633041381836 ], [ "▁territories", -12.836463928222656 ], [ "▁Permanent", -12.836465835571289 ], [ "▁christmas", -12.836471557617188 ], [ "arguing", -12.836490631103516 ], [ "zukünftig", -12.836654663085938 ], [ "▁Eindruck", -12.836817741394043 ], [ "personalised", -12.836854934692383 ], [ "▁vecin", -12.837211608886719 ], [ "▁Affiliate", -12.837234497070312 ], [ "▁Silk", -12.837249755859375 ], [ "▁Tub", -12.837440490722656 ], [ "▁remont", -12.837493896484375 ], [ "▁sauber", -12.837530136108398 ], [ "gehörig", -12.837562561035156 ], [ "Maritime", -12.83771800994873 ], [ "▁Bö", -12.837973594665527 ], [ "▁1957", -12.83800220489502 ], [ "▁unparalleled", -12.838005065917969 ], [ "▁fulfillment", -12.838042259216309 ], [ "▁collage", 
-12.838179588317871 ], [ "fenders", -12.838248252868652 ], [ "▁neige", -12.838275909423828 ], [ "▁gamers", -12.838325500488281 ], [ "tefan", -12.838339805603027 ], [ "▁wifi", -12.838349342346191 ], [ "▁leisten", -12.83835506439209 ], [ "▁Verbesserung", -12.838390350341797 ], [ "▁composant", -12.838400840759277 ], [ "▁LORD", -12.8384370803833 ], [ "arrive", -12.838472366333008 ], [ "▁conquer", -12.838562965393066 ], [ "▁lentil", -12.838767051696777 ], [ "▁Sprech", -12.838995933532715 ], [ "▁substitution", -12.839015007019043 ], [ ".05.", -12.839020729064941 ], [ "FORM", -12.839144706726074 ], [ "cădere", -12.839154243469238 ], [ "▁canyon", -12.839430809020996 ], [ "▁capacitate", -12.839442253112793 ], [ "▁menace", -12.839461326599121 ], [ "▁Antique", -12.839519500732422 ], [ "▁dizaine", -12.839550971984863 ], [ "▁Saturn", -12.839578628540039 ], [ "▁gastro", -12.83962631225586 ], [ "▁Vand", -12.839641571044922 ], [ "▁africa", -12.839682579040527 ], [ "▁hackers", -12.839702606201172 ], [ "▁Bailey", -12.839736938476562 ], [ "ouette", -12.839822769165039 ], [ "hoch", -12.839885711669922 ], [ "étudiant", -12.839973449707031 ], [ "▁1600", -12.840004920959473 ], [ "utiliz", -12.840167999267578 ], [ "reinigung", -12.840263366699219 ], [ "▁mileage", -12.84029483795166 ], [ "▁consacré", -12.840309143066406 ], [ "▁Norfolk", -12.840327262878418 ], [ "stacked", -12.840659141540527 ], [ "anbieter", -12.840731620788574 ], [ "▁gewünschte", -12.84073543548584 ], [ "▁silicon", -12.840761184692383 ], [ "Ensuite", -12.840794563293457 ], [ "▁vendu", -12.840850830078125 ], [ "▁viteza", -12.840851783752441 ], [ "▁evaluare", -12.840913772583008 ], [ "▁contient", -12.841036796569824 ], [ "▁Viagra", -12.841100692749023 ], [ "▁circumstance", -12.841283798217773 ], [ "walker", -12.841383934020996 ], [ "▁Aluminium", -12.84148120880127 ], [ "ço", -12.841556549072266 ], [ "▁Kli", -12.841643333435059 ], [ "▁deliberately", -12.841649055480957 ], [ "▁gamble", -12.841893196105957 ], [ "▁nourri", -12.841903686523438 ], [ "▁sealing", -12.84194278717041 ], [ "▁Atmosphäre", -12.842255592346191 ], [ "▁erschien", -12.842260360717773 ], [ "▁brightness", -12.842340469360352 ], [ "autonomie", -12.84251594543457 ], [ "▁propel", -12.842525482177734 ], [ "▁Infrastructure", -12.842642784118652 ], [ "▁război", -12.842642784118652 ], [ "▁jelly", -12.842684745788574 ], [ "scalable", -12.84280776977539 ], [ "regal", -12.84296703338623 ], [ "▁sarcini", -12.843031883239746 ], [ "▁Dienstag", -12.84304428100586 ], [ "▁Receive", -12.8430814743042 ], [ "▁mango", -12.843356132507324 ], [ "▁compétition", -12.84341812133789 ], [ "▁Monument", -12.843428611755371 ], [ "▁mast", -12.844159126281738 ], [ "▁instructed", -12.84425163269043 ], [ "▁aventur", -12.844277381896973 ], [ "139", -12.844298362731934 ], [ "▁Parmi", -12.84435749053955 ], [ "confined", -12.844416618347168 ], [ "acious", -12.844441413879395 ], [ "▁simptome", -12.844581604003906 ], [ "▁Fischer", -12.844897270202637 ], [ "störung", -12.844985008239746 ], [ "▁bilateral", -12.84504508972168 ], [ "preşedintele", -12.845274925231934 ], [ "accueillir", -12.845357894897461 ], [ "▁Schmidt", -12.845359802246094 ], [ "litis", -12.845373153686523 ], [ "WL", -12.8454008102417 ], [ "▁Rise", -12.845436096191406 ], [ "▁streamline", -12.845556259155273 ], [ "sozialen", -12.845585823059082 ], [ "▁Emirates", -12.845746040344238 ], [ "▁encrypted", -12.845746040344238 ], [ "▁unfamiliar", -12.845746040344238 ], [ "established", -12.84577751159668 ], [ "▁Tätigkeit", -12.845818519592285 ], [ "▁unaware", 
-12.845913887023926 ], [ "2:00", -12.8460054397583 ], [ "macher", -12.846013069152832 ], [ "NSA", -12.8461275100708 ], [ "▁rutier", -12.846177101135254 ], [ "▁Trent", -12.846212387084961 ], [ "▁sickness", -12.846277236938477 ], [ "▁advert", -12.846417427062988 ], [ "▁Kranken", -12.846426963806152 ], [ "▁Sandra", -12.846443176269531 ], [ "▁Recreation", -12.846449851989746 ], [ "▁Evidence", -12.846524238586426 ], [ "▁Immigration", -12.846524238586426 ], [ "▁carriage", -12.846524238586426 ], [ "▁justified", -12.84655475616455 ], [ "▁veche", -12.846579551696777 ], [ "PGA", -12.846604347229004 ], [ "▁Carmen", -12.846735000610352 ], [ "▁Faites", -12.846750259399414 ], [ "▁erfüllt", -12.84691333770752 ], [ "▁voilà", -12.846931457519531 ], [ "▁împlin", -12.846959114074707 ], [ "deposited", -12.84721565246582 ], [ "▁decisiv", -12.847241401672363 ], [ "CSA", -12.847249031066895 ], [ "pathy", -12.84726619720459 ], [ "▁erweitert", -12.847302436828613 ], [ "▁liquor", -12.847302436828613 ], [ "▁resilient", -12.847302436828613 ], [ "▁walmart", -12.847302436828613 ], [ "▁fencing", -12.847308158874512 ], [ "▁dépasse", -12.84731388092041 ], [ "KT", -12.847354888916016 ], [ "▁fries", -12.847368240356445 ], [ "vadă", -12.847421646118164 ], [ "▁Spania", -12.847478866577148 ], [ "▁complètement", -12.847725868225098 ], [ "▁lucrari", -12.84777545928955 ], [ "▁Lieb", -12.847908973693848 ], [ "leistungen", -12.847943305969238 ], [ "198", -12.847979545593262 ], [ "▁Schnell", -12.847997665405273 ], [ "▁radius", -12.84814453125 ], [ "▁beneficiaries", -12.848151206970215 ], [ "▁northwest", -12.848174095153809 ], [ "▁ -12.848223686218262 ], [ "▁embryo", -12.848492622375488 ], [ "▁ditch", -12.848791122436523 ], [ "▁Seriously", -12.848859786987305 ], [ "oppel", -12.848941802978516 ], [ "▁stalk", -12.849053382873535 ], [ "écriture", -12.849066734313965 ], [ "512", -12.84912109375 ], [ "wiesen", -12.849271774291992 ], [ "▁Consum", -12.849321365356445 ], [ "▁lună", -12.849405288696289 ], [ "▁lantern", -12.849441528320312 ], [ "▁italian", -12.849629402160645 ], [ "▁achiziți", -12.849639892578125 ], [ "▁catalyst", -12.849639892578125 ], [ "▁Arbeitgeber", -12.849662780761719 ], [ "▁researched", -12.8496675491333 ], [ "▁drastically", -12.849679946899414 ], [ "versammlung", -12.849735260009766 ], [ "410", -12.849800109863281 ], [ "▁impus", -12.850153923034668 ], [ "▁interchange", -12.850173950195312 ], [ "▁pharmacie", -12.850215911865234 ], [ "Live", -12.850354194641113 ], [ "dents", -12.850384712219238 ], [ "▁charcoal", -12.850419998168945 ], [ "▁odihn", -12.850420951843262 ], [ "▁pistol", -12.850444793701172 ], [ "▁complaining", -12.850576400756836 ], [ "manager", -12.850578308105469 ], [ "themed", -12.850578308105469 ], [ "▁Chang", -12.850650787353516 ], [ "▁rookie", -12.85070514678955 ], [ "Great", -12.850706100463867 ], [ "▁smoker", -12.850733757019043 ], [ "▁Container", -12.850812911987305 ], [ "▁bancaire", -12.850852966308594 ], [ "▁Actual", -12.850966453552246 ], [ "füllen", -12.850982666015625 ], [ "forum", -12.850985527038574 ], [ "bleib", -12.851073265075684 ], [ "▁combi", -12.851079940795898 ], [ "smoked", -12.851137161254883 ], [ "difficultés", -12.851161003112793 ], [ "▁tactical", -12.851240158081055 ], [ "▁sichtbar", -12.851483345031738 ], [ "▁dreptate", -12.851598739624023 ], [ "ERT", -12.85168743133545 ], [ "▁Pond", -12.85177993774414 ], [ "▁Holly", -12.851844787597656 ], [ "erfolg", -12.8518705368042 ], [ "▁Nordic", -12.851896286010742 ], [ "évènement", -12.851983070373535 ], [ "embracing", -12.851984024047852 
], [ "▁Maximum", -12.851984024047852 ], [ "▁défend", -12.85205078125 ], [ "▁fruct", -12.852056503295898 ], [ "▁Conditioning", -12.852099418640137 ], [ "LG", -12.852127075195312 ], [ "exigence", -12.852166175842285 ], [ "amide", -12.852187156677246 ], [ "▁darunter", -12.852208137512207 ], [ "▁EVERY", -12.852420806884766 ], [ "▁comparat", -12.85244083404541 ], [ "boosting", -12.852452278137207 ], [ "▁Hawaiian", -12.852553367614746 ], [ "▁Geburt", -12.852752685546875 ], [ "deci", -12.852782249450684 ], [ "▁Apollo", -12.852803230285645 ], [ "▁schützen", -12.852821350097656 ], [ "tragere", -12.852893829345703 ], [ "Online", -12.852904319763184 ], [ "▁neural", -12.852913856506348 ], [ "▁lucrez", -12.853188514709473 ], [ "▁phenomenal", -12.853253364562988 ], [ "▁Height", -12.853368759155273 ], [ "coordinating", -12.853548049926758 ], [ "geschnitten", -12.853631019592285 ], [ "auront", -12.853641510009766 ], [ "▁administer", -12.853644371032715 ], [ "▁contend", -12.853707313537598 ], [ "▁crispy", -12.853784561157227 ], [ "chuck", -12.854011535644531 ], [ "▁Condition", -12.8540678024292 ], [ "gestaltung", -12.854324340820312 ], [ "▁Blvd", -12.854331970214844 ], [ "▁subjective", -12.854470252990723 ], [ "▁événements", -12.854708671569824 ], [ "▁Jenny", -12.855131149291992 ], [ "▁cumpăra", -12.85519027709961 ], [ "constructing", -12.855262756347656 ], [ "▁instructional", -12.85539436340332 ], [ "▁sterling", -12.855446815490723 ], [ "scrise", -12.855470657348633 ], [ "▁Boulevard", -12.855551719665527 ], [ "pipe", -12.855620384216309 ], [ "▁Pride", -12.855748176574707 ], [ "▁Kau", -12.855751991271973 ], [ "▁overhaul", -12.855924606323242 ], [ "▁Recruitment", -12.855925559997559 ], [ "▁thrilling", -12.856218338012695 ], [ "living", -12.856302261352539 ], [ "▁rămân", -12.85645866394043 ], [ "▁MOD", -12.85661792755127 ], [ "▁Newport", -12.856675148010254 ], [ "▁infectious", -12.856688499450684 ], [ "6-3", -12.856860160827637 ], [ "▁Apache", -12.856976509094238 ], [ "▁dependence", -12.85698413848877 ], [ "nutzung", -12.857199668884277 ], [ "praised", -12.857211112976074 ], [ "▁craving", -12.857346534729004 ], [ "▁cramp", -12.857397079467773 ], [ "▁mancare", -12.857455253601074 ], [ "▁entdeckt", -12.857474327087402 ], [ "▁Pioneer", -12.857484817504883 ], [ "▁Adelaide", -12.857490539550781 ], [ "2.0", -12.857503890991211 ], [ "168", -12.857526779174805 ], [ "▁Decorating", -12.857611656188965 ], [ "▁unpleasant", -12.857854843139648 ], [ "▁déclaration", -12.857865333557129 ], [ "▁Grafik", -12.857908248901367 ], [ "5-2", -12.857937812805176 ], [ "căci", -12.857940673828125 ], [ "▁invade", -12.858171463012695 ], [ "▁internaţional", -12.858259201049805 ], [ "▁fraudulent", -12.858281135559082 ], [ "▁crestere", -12.858441352844238 ], [ "ografic", -12.858729362487793 ], [ "plină", -12.859140396118164 ], [ "sunteti", -12.859150886535645 ], [ "/04", -12.859176635742188 ], [ "▁admis", -12.85935115814209 ], [ "▁mediation", -12.859403610229492 ], [ "ICC", -12.859424591064453 ], [ "roș", -12.859660148620605 ], [ "▁Aroma", -12.8596773147583 ], [ "1:00", -12.859792709350586 ], [ "gasesc", -12.859822273254395 ], [ "▁Defence", -12.859850883483887 ], [ "▁dictionary", -12.859856605529785 ], [ "▁Batterie", -12.859865188598633 ], [ "▁gesunde", -12.85997486114502 ], [ "146", -12.860099792480469 ], [ "▁mortal", -12.860129356384277 ], [ "▁Flughafen", -12.860230445861816 ], [ "hhh", -12.860284805297852 ], [ "▁novice", -12.860342025756836 ], [ "▁Develop", -12.86043930053711 ], [ "▁accidental", -12.860516548156738 ], [ "Muzeul", 
-12.86054515838623 ], [ "▁Jupiter", -12.86062240600586 ], [ "supposedly", -12.860662460327148 ], [ "energy", -12.860758781433105 ], [ "▁montrer", -12.860764503479004 ], [ "recalled", -12.860795021057129 ], [ "Press", -12.860801696777344 ], [ "▁postcard", -12.86080265045166 ], [ "target", -12.86081600189209 ], [ "▁vêtements", -12.860881805419922 ], [ "▁particle", -12.860888481140137 ], [ "professional", -12.8608980178833 ], [ "▁1949", -12.860917091369629 ], [ "yah", -12.860980033874512 ], [ "▁Spiegel", -12.861017227172852 ], [ "▁Jeffrey", -12.861023902893066 ], [ "fahrzeug", -12.861027717590332 ], [ "▁Plug", -12.861051559448242 ], [ "▁violin", -12.861150741577148 ], [ "▁condemn", -12.861381530761719 ], [ "▁conducere", -12.861398696899414 ], [ "▁Chevrolet", -12.861412048339844 ], [ "▁conceput", -12.861461639404297 ], [ "▁Merri", -12.861493110656738 ], [ "judging", -12.861559867858887 ], [ "embraced", -12.86168098449707 ], [ "▁Compact", -12.861715316772461 ], [ "▁château", -12.861807823181152 ], [ "etch", -12.861945152282715 ], [ "bedroom", -12.861995697021484 ], [ "People", -12.862038612365723 ], [ "25,000", -12.86209774017334 ], [ "ocyte", -12.862146377563477 ], [ "▁Lenovo", -12.862205505371094 ], [ "▁Hampton", -12.862241744995117 ], [ "5.2", -12.862244606018066 ], [ "▁progres", -12.862266540527344 ], [ "hoc", -12.862288475036621 ], [ "▁complementary", -12.86241340637207 ], [ "turned", -12.862485885620117 ], [ "mangel", -12.862508773803711 ], [ "▁Drew", -12.862592697143555 ], [ "épisode", -12.86259651184082 ], [ "▁Versorgung", -12.86259651184082 ], [ "▁ausdrücklich", -12.86259651184082 ], [ "ciune", -12.862788200378418 ], [ "▁sfârșit", -12.862990379333496 ], [ "Agricultural", -12.862991333007812 ], [ "▁caffeine", -12.862991333007812 ], [ "▁emergencies", -12.862991333007812 ], [ "▁unhappy", -12.862991333007812 ], [ "(7)", -12.863043785095215 ], [ "▁inlocui", -12.863059043884277 ], [ "▁Rochester", -12.863153457641602 ], [ "183", -12.863155364990234 ], [ "niz", -12.863285064697266 ], [ "tasche", -12.863462448120117 ], [ "▁Salle", -12.86347484588623 ], [ "cît", -12.863478660583496 ], [ "▁Singer", -12.863489151000977 ], [ "▁economically", -12.863506317138672 ], [ "▁ieși", -12.863525390625 ], [ "▁façade", -12.86378288269043 ], [ "Ohne", -12.863801956176758 ], [ "▁edible", -12.863842964172363 ], [ "Rob", -12.863851547241211 ], [ "▁(2014)", -12.863859176635742 ], [ "▁Zar", -12.863919258117676 ], [ "▁obey", -12.863995552062988 ], [ "Pack", -12.864087104797363 ], [ "▁Omni", -12.864198684692383 ], [ "▁Gilbert", -12.864212036132812 ], [ "▁Vlad", -12.86429500579834 ], [ "▁pauvre", -12.864333152770996 ], [ "▁secular", -12.864383697509766 ], [ "Center", -12.864415168762207 ], [ "▁Prospect", -12.864457130432129 ], [ "▁Noah", -12.86450481414795 ], [ "▁Interactive", -12.86471176147461 ], [ "▁centaine", -12.86485767364502 ], [ "▁cerebral", -12.864971160888672 ], [ "▁Novel", -12.865013122558594 ], [ "▁Käufer", -12.865039825439453 ], [ "werfen", -12.865056991577148 ], [ "▁reluctant", -12.865143775939941 ], [ "ес", -12.86520004272461 ], [ "Look", -12.86521053314209 ], [ "Erkrankung", -12.86536693572998 ], [ "▁cucumber", -12.86536693572998 ], [ "/2017", -12.865399360656738 ], [ "▁flank", -12.865405082702637 ], [ "opportunité", -12.865667343139648 ], [ "zugleich", -12.865766525268555 ], [ "RAT", -12.865840911865234 ], [ "▁avantages", -12.865880012512207 ], [ "▁außer", -12.866008758544922 ], [ "GV", -12.866090774536133 ], [ "▁Continental", -12.866159439086914 ], [ "▁affiliation", -12.866159439086914 ], [ 
"▁ursprünglich", -12.86618423461914 ], [ "▁hardship", -12.866349220275879 ], [ "âme", -12.86647891998291 ], [ "▁hallway", -12.866576194763184 ], [ "▁afară", -12.866578102111816 ], [ "western", -12.866714477539062 ], [ "▁Jacket", -12.866802215576172 ], [ "▁culturelle", -12.866876602172852 ], [ "▁glaci", -12.866995811462402 ], [ "metoda", -12.867036819458008 ], [ "▁clerk", -12.867045402526855 ], [ "▁ordinance", -12.867185592651367 ], [ "▁Initial", -12.867197036743164 ], [ "waking", -12.86722469329834 ], [ "▁Secondary", -12.867366790771484 ], [ "▁Solomon", -12.867411613464355 ], [ "glomer", -12.867488861083984 ], [ "SYS", -12.867530822753906 ], [ "▁Florin", -12.867596626281738 ], [ "ffentlich", -12.867670059204102 ], [ "▁Printer", -12.867674827575684 ], [ "▁dimineata", -12.86774730682373 ], [ "▁stripes", -12.867748260498047 ], [ "plugged", -12.86776065826416 ], [ "öhl", -12.867836952209473 ], [ "infused", -12.867875099182129 ], [ "▁Rubber", -12.867895126342773 ], [ "paved", -12.867898941040039 ], [ "▁Devi", -12.867995262145996 ], [ "▁subway", -12.8681640625 ], [ "▁gases", -12.868306159973145 ], [ "▁reguli", -12.868371963500977 ], [ "▁Rebel", -12.868413925170898 ], [ "▁destructive", -12.868546485900879 ], [ "▁oferind", -12.868664741516113 ], [ "9001", -12.868876457214355 ], [ "CRA", -12.868912696838379 ], [ "why", -12.868932723999023 ], [ "sensul", -12.869036674499512 ], [ "guter", -12.869277000427246 ], [ "Empfehlung", -12.869338035583496 ], [ "▁convertible", -12.86953353881836 ], [ "▁predominantly", -12.869637489318848 ], [ "▁Mentor", -12.869649887084961 ], [ "Practic", -12.869720458984375 ], [ "▁echipă", -12.869754791259766 ], [ "onsite", -12.869853019714355 ], [ "▁zunehmend", -12.86994743347168 ], [ "▁Harbour", -12.870016098022461 ], [ "▁pineapple", -12.870133399963379 ], [ "▁gasoline", -12.870139122009277 ], [ "▁Jaguar", -12.870158195495605 ], [ "kno", -12.870259284973145 ], [ "▁heap", -12.870448112487793 ], [ "▁fictional", -12.870481491088867 ], [ "fiinta", -12.870753288269043 ], [ "▁Amber", -12.87081241607666 ], [ "▁Exclusive", -12.870929718017578 ], [ "▁Pharmaceutical", -12.870929718017578 ], [ "▁unterscheide", -12.871044158935547 ], [ "▁1942", -12.871116638183594 ], [ "▁Ceiling", -12.87115478515625 ], [ "developed", -12.871228218078613 ], [ "▁consacr", -12.87132453918457 ], [ "▁Membr", -12.871411323547363 ], [ "erton", -12.871447563171387 ], [ "habitation", -12.871685981750488 ], [ "▁longevity", -12.871726989746094 ], [ "▁Starbucks", -12.871728897094727 ], [ "▁poat", -12.871771812438965 ], [ "▁commissioner", -12.871794700622559 ], [ "pedia", -12.871938705444336 ], [ "popped", -12.872468948364258 ], [ "versorgung", -12.872525215148926 ], [ "▁Aktivitäten", -12.872525215148926 ], [ "▁Betreuung", -12.872525215148926 ], [ "▁afacere", -12.872968673706055 ], [ "▁Mechanical", -12.873323440551758 ], [ "▁Leiter", -12.873346328735352 ], [ "▁scaling", -12.873427391052246 ], [ "▁Slim", -12.87350082397461 ], [ "▁temperaturi", -12.873516082763672 ], [ "ACH", -12.873558044433594 ], [ "▁jährlich", -12.873682022094727 ], [ "▁photographie", -12.873722076416016 ], [ "▁préalable", -12.873725891113281 ], [ "▁părinți", -12.87372875213623 ], [ "▁Farmers", -12.873873710632324 ], [ "▁Printable", -12.873905181884766 ], [ "Früh", -12.873908996582031 ], [ "approved", -12.87398624420166 ], [ "otro", -12.874094009399414 ], [ "▁veneer", -12.874099731445312 ], [ "▁Warriors", -12.874122619628906 ], [ "▁Approach", -12.874149322509766 ], [ "Share", -12.874238967895508 ], [ "▁buds", -12.874252319335938 ], [ "▁Într", 
-12.874330520629883 ], [ "glichen", -12.87452507019043 ], [ "▁anbieten", -12.87452507019043 ], [ "MET", -12.874539375305176 ], [ "amélioration", -12.87468147277832 ], [ "ländische", -12.87468433380127 ], [ "nsgesamt", -12.874764442443848 ], [ "einiger", -12.874822616577148 ], [ "▁Förderung", -12.874876022338867 ], [ "destroying", -12.874910354614258 ], [ "▁accreditation", -12.874922752380371 ], [ "reminiscent", -12.875094413757324 ], [ "▁retriev", -12.87528133392334 ], [ "▁Flü", -12.875306129455566 ], [ "▁Monsieur", -12.875322341918945 ], [ "German", -12.87536334991455 ], [ "Orice", -12.875443458557129 ], [ "künftig", -12.875523567199707 ], [ "▁vorbi", -12.875639915466309 ], [ "▁intentionally", -12.875733375549316 ], [ "▁îngrij", -12.875743865966797 ], [ "▁laughed", -12.875850677490234 ], [ "▁Fiction", -12.875913619995117 ], [ "▁inteligent", -12.875914573669434 ], [ "▁Translation", -12.875953674316406 ], [ "greete", -12.875983238220215 ], [ "▁énergétique", -12.876123428344727 ], [ "uncovered", -12.876248359680176 ], [ "▁évidemment", -12.876523971557617 ], [ "▁Vietnamese", -12.876535415649414 ], [ "▁Libya", -12.876675605773926 ], [ "▁Trailer", -12.876734733581543 ], [ "▁Wohl", -12.876871109008789 ], [ "▁Congo", -12.87698745727539 ], [ "▁freut", -12.877002716064453 ], [ "zauber", -12.877090454101562 ], [ "▁Pân", -12.877142906188965 ], [ "▁mentine", -12.877333641052246 ], [ "▁welding", -12.877335548400879 ], [ "▁Mircea", -12.8773775100708 ], [ "▁optimism", -12.877455711364746 ], [ "VEL", -12.877504348754883 ], [ "oilea", -12.877540588378906 ], [ "▁thereafter", -12.877612113952637 ], [ "▁André", -12.877710342407227 ], [ "forschung", -12.877799987792969 ], [ "running", -12.878022193908691 ], [ "▁hostile", -12.878059387207031 ], [ "Homme", -12.87811279296875 ], [ "▁Satellite", -12.878129005432129 ], [ "▁collagen", -12.87841796875 ], [ "▁concedi", -12.878518104553223 ], [ "▁produziert", -12.87852954864502 ], [ "▁virgin", -12.878540992736816 ], [ "frant", -12.87857723236084 ], [ "▁teammates", -12.878744125366211 ], [ "▁faceti", -12.878802299499512 ], [ "▁Restoration", -12.87893295288086 ], [ "▁detached", -12.878935813903809 ], [ "▁Instructor", -12.878950119018555 ], [ "montag", -12.879227638244629 ], [ "▁borrowing", -12.879375457763672 ], [ "▁Retro", -12.879446983337402 ], [ "▁behandelt", -12.879536628723145 ], [ "▁Aussage", -12.879715919494629 ], [ "▁snorkel", -12.879734992980957 ], [ "▁Proceedings", -12.879754066467285 ], [ "▁Judy", -12.879776000976562 ], [ "▁Wendy", -12.879783630371094 ], [ "artă", -12.879920959472656 ], [ "▁Vergangenheit", -12.88013744354248 ], [ "▁Gegner", -12.880139350891113 ], [ "▁ulcer", -12.880166053771973 ], [ "wirksam", -12.880553245544434 ], [ "▁închis", -12.880560874938965 ], [ "▁emission", -12.88068962097168 ], [ "ulescu", -12.880754470825195 ], [ "▁bancar", -12.880819320678711 ], [ "compromising", -12.880924224853516 ], [ "▁Priest", -12.881156921386719 ], [ "▁Progress", -12.881318092346191 ], [ "▁punish", -12.88144588470459 ], [ "▁Afin", -12.881450653076172 ], [ "▁Bog", -12.881514549255371 ], [ "lunii", -12.881525039672852 ], [ "▁ressembl", -12.881570816040039 ], [ "▁Creation", -12.881644248962402 ], [ "effet", -12.881668090820312 ], [ "Versicherung", -12.881671905517578 ], [ "médias", -12.881672859191895 ], [ "▁Kritik", -12.881793975830078 ], [ "idia", -12.881896018981934 ], [ "▁Wasch", -12.881929397583008 ], [ "UAL", -12.882059097290039 ], [ "Approximately", -12.882149696350098 ], [ "izari", -12.882152557373047 ], [ "▁Dortmund", -12.882152557373047 ], [ "▁contul", 
-12.882343292236328 ], [ "▁Airways", -12.882408142089844 ], [ "sicherung", -12.882535934448242 ], [ "échelle", -12.882560729980469 ], [ "ADD", -12.882582664489746 ], [ "DIA", -12.88259506225586 ], [ "kabel", -12.882621765136719 ], [ "Media", -12.88268756866455 ], [ "ampli", -12.882894515991211 ], [ "▁quarry", -12.88295841217041 ], [ "▁acoper", -12.883072853088379 ], [ "halter", -12.883326530456543 ], [ "▁solicitor", -12.883684158325195 ], [ "phosphat", -12.883763313293457 ], [ "▁drown", -12.883773803710938 ], [ "congratulat", -12.884047508239746 ], [ "▁uneven", -12.884087562561035 ], [ "▁rupe", -12.884154319763184 ], [ "▁heureux", -12.88417911529541 ], [ "caractéristiques", -12.884221076965332 ], [ "60,000", -12.884283065795898 ], [ "ambigu", -12.884340286254883 ], [ "224", -12.884417533874512 ], [ "dov", -12.88454532623291 ], [ "▁Naturally", -12.884629249572754 ], [ "▁Ernst", -12.884634017944336 ], [ "Camp", -12.884757995605469 ], [ "▁Worldwide", -12.884909629821777 ], [ "▁antrenament", -12.885042190551758 ], [ "▁jocul", -12.88521671295166 ], [ "▁broccoli", -12.88537883758545 ], [ "▁fascinated", -12.88537883758545 ], [ "▁Abbey", -12.885387420654297 ], [ "▁aquarium", -12.885390281677246 ], [ "HAN", -12.885458946228027 ], [ "chaffung", -12.885480880737305 ], [ "137", -12.885503768920898 ], [ "rumors", -12.885515213012695 ], [ "reliance", -12.885557174682617 ], [ "▁vaccination", -12.8856782913208 ], [ "responsabilitate", -12.885777473449707 ], [ "▁legislati", -12.885782241821289 ], [ "ATT", -12.885826110839844 ], [ "206", -12.885896682739258 ], [ "▁miere", -12.885967254638672 ], [ "▁rezultatul", -12.885988235473633 ], [ "părea", -12.88599681854248 ], [ "zuführen", -12.886159896850586 ], [ "▁Kompetenz", -12.886187553405762 ], [ "▁nickname", -12.886195182800293 ], [ "pilot", -12.88620376586914 ], [ "▁ninth", -12.886252403259277 ], [ "▁Tyr", -12.886446952819824 ], [ "▁misuse", -12.886469841003418 ], [ "▁SUP", -12.886514663696289 ], [ "▁Attack", -12.88667106628418 ], [ "Smart", -12.88669490814209 ], [ "▁Philosoph", -12.886930465698242 ], [ "▁Alege", -12.886931419372559 ], [ "▁femeile", -12.886967658996582 ], [ "▁Heating", -12.88698673248291 ], [ "▁Cricket", -12.886999130249023 ], [ "▁scholar", -12.887049674987793 ], [ "Model", -12.887073516845703 ], [ "▁stimulating", -12.887182235717773 ], [ "▁industrielle", -12.887189865112305 ], [ "▁phenomena", -12.887303352355957 ], [ "▁Nahrung", -12.887414932250977 ], [ "▁Conditioner", -12.887433052062988 ], [ "führ", -12.887489318847656 ], [ "▁révolution", -12.88757610321045 ], [ "plastic", -12.887595176696777 ], [ "▁approximate", -12.887596130371094 ], [ "▁dienen", -12.887624740600586 ], [ "▁obsession", -12.887807846069336 ], [ "▁rectangular", -12.887807846069336 ], [ "Allemagne", -12.887808799743652 ], [ "▁Tanzania", -12.887824058532715 ], [ "border", -12.887884140014648 ], [ "▁crashed", -12.887958526611328 ], [ "visor", -12.887974739074707 ], [ "▁autorizat", -12.888072967529297 ], [ "▁Champagne", -12.888222694396973 ], [ "längst", -12.888238906860352 ], [ "▁realities", -12.888314247131348 ], [ "▁Keyword", -12.88831615447998 ], [ "▁GUI", -12.888495445251465 ], [ "▁simplified", -12.88865852355957 ], [ "▁Rack", -12.888681411743164 ], [ "▁Zahlen", -12.888693809509277 ], [ "growth", -12.888897895812988 ], [ "▁rehearsal", -12.888991355895996 ], [ "▁Epic", -12.888999938964844 ], [ "▁réussite", -12.889195442199707 ], [ "▁politician", -12.889263153076172 ], [ "▁emoți", -12.889378547668457 ], [ "▁delegation", -12.889449119567871 ], [ "▁со", -12.889464378356934 ], 
[ "oversized", -12.889477729797363 ], [ "▁Motto", -12.889481544494629 ], [ "1860", -12.889788627624512 ], [ "▁defective", -12.889803886413574 ], [ "brewing", -12.889852523803711 ], [ "linguistic", -12.890243530273438 ], [ "▁Hopkins", -12.890265464782715 ], [ "▁(2012)", -12.89030933380127 ], [ "crease", -12.890436172485352 ], [ "▁Versicherungs", -12.89052677154541 ], [ "▁Noble", -12.890752792358398 ], [ "▁Bekannt", -12.890896797180176 ], [ "▁vorstellen", -12.89095401763916 ], [ "▁suburban", -12.890970230102539 ], [ "DAC", -12.890995025634766 ], [ "▁scatter", -12.89103889465332 ], [ "▁Artificial", -12.8910551071167 ], [ "▁reactor", -12.891073226928711 ], [ "▁modelling", -12.89108943939209 ], [ "▁Holder", -12.891148567199707 ], [ "athon", -12.891149520874023 ], [ "147", -12.891190528869629 ], [ "▁stagn", -12.891257286071777 ], [ "ARY", -12.891261100769043 ], [ "Space", -12.89126968383789 ], [ "▁Gibson", -12.891718864440918 ], [ "▁Investigator", -12.89173698425293 ], [ "▁1914", -12.891818046569824 ], [ "▁Muhammad", -12.891868591308594 ], [ "▁shove", -12.892073631286621 ], [ "▁erklären", -12.892276763916016 ], [ "▁abdomen", -12.892277717590332 ], [ "▁Mazda", -12.892349243164062 ], [ "▁hemo", -12.892364501953125 ], [ "National", -12.892455101013184 ], [ "starken", -12.89267635345459 ], [ "▁Cyprus", -12.892683982849121 ], [ "▁tread", -12.892721176147461 ], [ "▁sweetness", -12.892725944519043 ], [ "stunden", -12.892790794372559 ], [ "▁couverture", -12.893059730529785 ], [ "▁Successful", -12.893060684204102 ], [ "▁oublier", -12.893171310424805 ], [ "▁esential", -12.893203735351562 ], [ "estival", -12.89321231842041 ], [ "gnac", -12.893280029296875 ], [ "▁Basement", -12.893457412719727 ], [ "presumably", -12.893497467041016 ], [ "▁mourn", -12.893561363220215 ], [ "armée", -12.893677711486816 ], [ "148", -12.893845558166504 ], [ "▁residue", -12.894006729125977 ], [ "▁metalic", -12.89404296875 ], [ "▁Zell", -12.89425277709961 ], [ "Build", -12.894280433654785 ], [ "▁prevalence", -12.894312858581543 ], [ "▁wrestling", -12.894312858581543 ], [ "▁ascuns", -12.894325256347656 ], [ "Sacred", -12.894340515136719 ], [ "Tec", -12.89438533782959 ], [ "▁Kindergarten", -12.894389152526855 ], [ "bindung", -12.894464492797852 ], [ "▁ritm", -12.894545555114746 ], [ "▁triste", -12.894651412963867 ], [ "▁introdus", -12.894758224487305 ], [ "/2016", -12.894824028015137 ], [ "▁română", -12.894899368286133 ], [ "▁bibli", -12.89490032196045 ], [ "▁cigar", -12.894913673400879 ], [ "Rie", -12.894990921020508 ], [ "▁intentional", -12.894999504089355 ], [ "▁cuprins", -12.895098686218262 ], [ "remarkably", -12.895129203796387 ], [ "▁printemps", -12.895133972167969 ], [ "▁declining", -12.895171165466309 ], [ "Magazin", -12.89552116394043 ], [ "▁săptămână", -12.895537376403809 ], [ "▁vérifier", -12.895549774169922 ], [ "▁Speise", -12.895584106445312 ], [ "▁reteta", -12.8956298828125 ], [ "heed", -12.895772933959961 ], [ "▁Compliance", -12.895946502685547 ], [ "▁embroidery", -12.895946502685547 ], [ "cried", -12.896025657653809 ], [ "▁(„", -12.896282196044922 ], [ "▁heck", -12.89629077911377 ], [ "▁sadness", -12.896501541137695 ], [ "▁impulse", -12.896585464477539 ], [ "ATH", -12.896740913391113 ], [ "▁lavender", -12.896773338317871 ], [ "uiesc", -12.896790504455566 ], [ "▁Disorder", -12.896876335144043 ], [ "stroke", -12.896991729736328 ], [ "▁piaţ", -12.8970365524292 ], [ "ournée", -12.897049903869629 ], [ "▁Barnes", -12.8971586227417 ], [ "▁scăzut", -12.897172927856445 ], [ "▁équipements", -12.89725112915039 ], [ "OND", 
-12.897375106811523 ], [ "▁Compet", -12.897424697875977 ], [ "▁Bestell", -12.89748477935791 ], [ "▁immédiatement", -12.897587776184082 ], [ "aparut", -12.89759635925293 ], [ "▁rainfall", -12.897882461547852 ], [ "oreille", -12.89797306060791 ], [ "▁ministère", -12.898014068603516 ], [ "iris", -12.898140907287598 ], [ "dyna", -12.898279190063477 ], [ "drücken", -12.898343086242676 ], [ "▁détect", -12.89834976196289 ], [ "▁fonctionnalité", -12.89840030670166 ], [ "▁imbalance", -12.89840030670166 ], [ "▁unpredictable", -12.89840030670166 ], [ "▁literar", -12.89846134185791 ], [ "▁Windsor", -12.898472785949707 ], [ "▁Unlimited", -12.898481369018555 ], [ "colour", -12.898674964904785 ], [ "▁Portfolio", -12.898810386657715 ], [ "149", -12.898883819580078 ], [ "volution", -12.898890495300293 ], [ "▁folgende", -12.899078369140625 ], [ "▁arbitration", -12.899105072021484 ], [ "kicking", -12.89913558959961 ], [ "zügig", -12.89923095703125 ], [ "▁1941", -12.899311065673828 ], [ "▁Drake", -12.89955997467041 ], [ "▁ausführlich", -12.899630546569824 ], [ "▁chaussure", -12.899630546569824 ], [ "▁intestinal", -12.89976692199707 ], [ "▁pilgrim", -12.900040626525879 ], [ "▁Bark", -12.900142669677734 ], [ "between", -12.900157928466797 ], [ "disposed", -12.900175094604492 ], [ "▁Dylan", -12.900218963623047 ], [ "ств", -12.900253295898438 ], [ "NOR", -12.900287628173828 ], [ "traces", -12.90038776397705 ], [ "▁moindre", -12.900500297546387 ], [ "▁$10,000", -12.900552749633789 ], [ "212", -12.900599479675293 ], [ "wusste", -12.900659561157227 ], [ "▁predictable", -12.900671005249023 ], [ "poţi", -12.900679588317871 ], [ "▁Celsius", -12.900860786437988 ], [ "gebunden", -12.90086841583252 ], [ "▁Legacy", -12.900891304016113 ], [ "movers", -12.90090274810791 ], [ "▁concret", -12.90098762512207 ], [ "▁simpla", -12.901050567626953 ], [ "rechnet", -12.901103973388672 ], [ "▁certainty", -12.901144981384277 ], [ "entrepreneurship", -12.901153564453125 ], [ "kohl", -12.901289939880371 ], [ "▁curte", -12.901311874389648 ], [ "▁Forbes", -12.901411056518555 ], [ "▁Zusatz", -12.901535987854004 ], [ "blending", -12.90163803100586 ], [ "▁variat", -12.901642799377441 ], [ "▁galaxy", -12.90168285369873 ], [ "▁safari", -12.90168571472168 ], [ "▁municipalities", -12.9017972946167 ], [ "▁Drept", -12.90180778503418 ], [ "aufnahme", -12.902128219604492 ], [ "▁endorse", -12.902223587036133 ], [ "einrichtung", -12.902244567871094 ], [ "Sync", -12.902270317077637 ], [ "abide", -12.902323722839355 ], [ "brushed", -12.902350425720215 ], [ "▁actiune", -12.902410507202148 ], [ "quaint", -12.902498245239258 ], [ "▁volatility", -12.902504920959473 ], [ "▁repetitive", -12.902505874633789 ], [ "▁découvr", -12.902560234069824 ], [ "Totodat", -12.902585983276367 ], [ "▁românesc", -12.902682304382324 ], [ "▁tempting", -12.902772903442383 ], [ "thesis", -12.902947425842285 ], [ "secure", -12.903013229370117 ], [ "delt", -12.903019905090332 ], [ "▁şef", -12.903167724609375 ], [ "▁epidemic", -12.903326988220215 ], [ "▁Appliance", -12.903327941894531 ], [ "cearcă", -12.903331756591797 ], [ "▁lodging", -12.903361320495605 ], [ "▁photographed", -12.903507232666016 ], [ "geschlagen", -12.903794288635254 ], [ "▁Methodist", -12.90380859375 ], [ "▁Transit", -12.90389347076416 ], [ "▁Länder", -12.903934478759766 ], [ "villa", -12.903986930847168 ], [ "▁toilette", -12.904031753540039 ], [ "anno", -12.904074668884277 ], [ "▁Aufnahme", -12.904091835021973 ], [ "▁Coral", -12.904099464416504 ], [ "pourraient", -12.904129981994629 ], [ "▁digestion", 
-12.904245376586914 ], [ "▁Vacation", -12.904274940490723 ], [ "▁Rugby", -12.904275894165039 ], [ "MIC", -12.904311180114746 ], [ "▁choc", -12.904417991638184 ], [ "2002", -12.904492378234863 ], [ "gestion", -12.904674530029297 ], [ "▁Zoom", -12.904745101928711 ], [ "essor", -12.904763221740723 ], [ "weighed", -12.904793739318848 ], [ "▁dispus", -12.904987335205078 ], [ "▁redemption", -12.90502643585205 ], [ "▁plaster", -12.905071258544922 ], [ "▁Quilt", -12.90507698059082 ], [ "▁teritoriul", -12.905088424682617 ], [ "ndern", -12.905097961425781 ], [ "▁expired", -12.905105590820312 ], [ "▁Tribunal", -12.905122756958008 ], [ "occupation", -12.9052152633667 ], [ "▁woodland", -12.905248641967773 ], [ "vieux", -12.905254364013672 ], [ "▁Midland", -12.905465126037598 ], [ "gât", -12.90571117401123 ], [ "électricité", -12.905800819396973 ], [ "▁vanzare", -12.905811309814453 ], [ "biologi", -12.905961036682129 ], [ "▁vive", -12.906060218811035 ], [ "▁Alarm", -12.906097412109375 ], [ "▁experiență", -12.9061279296875 ], [ "▁Loch", -12.906133651733398 ], [ "▁Pedro", -12.906194686889648 ], [ "▁detergent", -12.906217575073242 ], [ "language", -12.906554222106934 ], [ "▁sedan", -12.906655311584473 ], [ "▁Brady", -12.906736373901367 ], [ "▁compus", -12.906976699829102 ], [ "▁landfill", -12.906982421875 ], [ "giu", -12.907039642333984 ], [ "beziehung", -12.9070405960083 ], [ "▁picior", -12.907184600830078 ], [ "ALI", -12.907235145568848 ], [ "▁Commander", -12.907256126403809 ], [ "EPS", -12.907303810119629 ], [ "▁Textil", -12.907320022583008 ], [ "▁industria", -12.907339096069336 ], [ "lox", -12.907365798950195 ], [ "▁eclectic", -12.907453536987305 ], [ "▁gracious", -12.907477378845215 ], [ "Uniunea", -12.907525062561035 ], [ "bps", -12.90754222869873 ], [ "▁entertained", -12.907634735107422 ], [ "depinde", -12.907767295837402 ], [ "▁daylight", -12.907893180847168 ], [ "▁résistance", -12.907995223999023 ], [ "ARN", -12.908194541931152 ], [ "▁unavailable", -12.908201217651367 ], [ "Curtea", -12.908390045166016 ], [ "▁pores", -12.908502578735352 ], [ "▁Tonight", -12.908649444580078 ], [ "▁datori", -12.90869426727295 ], [ "▁gezielt", -12.908703804016113 ], [ "▁rupture", -12.90875244140625 ], [ "▁disput", -12.908848762512207 ], [ "▁sonstige", -12.908895492553711 ], [ "▁Ordnung", -12.90910816192627 ], [ "▁beschrieben", -12.909114837646484 ], [ "▁Rainbow", -12.90911865234375 ], [ "▁Werkzeug", -12.909136772155762 ], [ "GIN", -12.909354209899902 ], [ "facilitating", -12.909490585327148 ], [ "hunt", -12.90955638885498 ], [ "▁Serving", -12.909673690795898 ], [ "Writ", -12.909692764282227 ], [ "requisite", -12.909798622131348 ], [ "▁Kerry", -12.90989875793457 ], [ "▁riesig", -12.909957885742188 ], [ "▁Healing", -12.91030502319336 ], [ "▁1954", -12.910365104675293 ], [ "▁mousse", -12.910428047180176 ], [ "▁Positive", -12.910764694213867 ], [ "embodie", -12.910772323608398 ], [ "▁penetrate", -12.910774230957031 ], [ "endorsed", -12.910882949829102 ], [ "▁situatia", -12.910927772521973 ], [ "▁Unity", -12.911083221435547 ], [ "142", -12.911102294921875 ], [ "▁farmhouse", -12.911138534545898 ], [ "▁Handbook", -12.911368370056152 ], [ "▁symbolic", -12.911378860473633 ], [ "pristine", -12.911439895629883 ], [ "moitié", -12.911595344543457 ], [ "▁Sessions", -12.912017822265625 ], [ "technisch", -12.912116050720215 ], [ "▁lesquel", -12.912148475646973 ], [ "▁electronically", -12.912208557128906 ], [ "▁modificat", -12.912240982055664 ], [ "▁adjoin", -12.912242889404297 ], [ "actualité", -12.912256240844727 ], [ "vati", 
-12.91229248046875 ], [ "VENT", -12.912299156188965 ], [ "▁salsa", -12.912333488464355 ], [ "acupunctur", -12.912424087524414 ], [ "▁Opportunity", -12.912424087524414 ], [ "▁Inspection", -12.912425994873047 ], [ "▁vereinbart", -12.912425994873047 ], [ "▁Residents", -12.912426948547363 ], [ "▁perennial", -12.91242790222168 ], [ "CHAN", -12.912555694580078 ], [ "Search", -12.912572860717773 ], [ "UTE", -12.912696838378906 ], [ "▁Lens", -12.912703514099121 ], [ "▁Banner", -12.91281509399414 ], [ "aménagement", -12.912839889526367 ], [ "▁Decision", -12.91286849975586 ], [ "▁ferr", -12.912869453430176 ], [ "▁Transformation", -12.912878036499023 ], [ "▁Stamm", -12.912955284118652 ], [ "▁Galerie", -12.913003921508789 ], [ "onny", -12.913126945495605 ], [ "▁caption", -12.913195610046387 ], [ "▁viitorul", -12.91323471069336 ], [ "▁professionelle", -12.913281440734863 ], [ "drepturile", -12.913294792175293 ], [ "ylon", -12.913345336914062 ], [ "Société", -12.913387298583984 ], [ "AIS", -12.913456916809082 ], [ "March", -12.91350269317627 ], [ "▁Rav", -12.91357707977295 ], [ "▁1946", -12.913691520690918 ], [ "accompagnement", -12.913713455200195 ], [ "Liviu", -12.913716316223145 ], [ "▁Appeal", -12.913826942443848 ], [ "▁sentir", -12.913952827453613 ], [ "▁Indigenous", -12.914087295532227 ], [ "▁wizard", -12.914087295532227 ], [ "▁collateral", -12.914127349853516 ], [ "▁Proof", -12.914324760437012 ], [ "▁prze", -12.914398193359375 ], [ "▁obținut", -12.91450309753418 ], [ "COP", -12.914629936218262 ], [ "▁obiect", -12.914681434631348 ], [ "▁isolate", -12.914685249328613 ], [ "▁nieder", -12.914793014526367 ], [ "TECH", -12.914953231811523 ], [ "▁Sharing", -12.914998054504395 ], [ "Ideally", -12.915008544921875 ], [ "▁naked", -12.915059089660645 ], [ "horaire", -12.915130615234375 ], [ "▁prelucrare", -12.915180206298828 ], [ "▁forcément", -12.915349006652832 ], [ "▁ESPN", -12.915403366088867 ], [ "▁southwest", -12.9154634475708 ], [ "▁Timber", -12.915682792663574 ], [ "kleidung", -12.915748596191406 ], [ "MJ", -12.915854454040527 ], [ "Ped", -12.915889739990234 ], [ "▁lymph", -12.916181564331055 ], [ "wärme", -12.916399002075195 ], [ "▁Olivia", -12.916610717773438 ], [ "Ziua", -12.916705131530762 ], [ "reihe", -12.916747093200684 ], [ "▁selfish", -12.916752815246582 ], [ "▁geography", -12.916814804077148 ], [ "▁etaj", -12.916924476623535 ], [ "▁acquis", -12.91698932647705 ], [ "▁rejoin", -12.91701602935791 ], [ "7.1", -12.917097091674805 ], [ "▁paix", -12.91713809967041 ], [ "tirer", -12.917284965515137 ], [ "▁clase", -12.91745662689209 ], [ "▁blink", -12.917572021484375 ], [ "▁Interface", -12.917611122131348 ], [ "nado", -12.917655944824219 ], [ "RIT", -12.91777515411377 ], [ "ESC", -12.918120384216309 ], [ "▁carving", -12.918190002441406 ], [ "▁articolul", -12.918194770812988 ], [ "▁wreath", -12.918258666992188 ], [ "▁propaganda", -12.918266296386719 ], [ "▁Pair", -12.918267250061035 ], [ "▁pamant", -12.91831111907959 ], [ "▁venituri", -12.918357849121094 ], [ "rtz", -12.91835880279541 ], [ "uddle", -12.918529510498047 ], [ "uille", -12.918543815612793 ], [ "▁embed", -12.918654441833496 ], [ "0.05", -12.918655395507812 ], [ "▁Brighton", -12.918718338012695 ], [ "estens", -12.918742179870605 ], [ "▁occupational", -12.918862342834473 ], [ "ем", -12.918890953063965 ], [ "wünsche", -12.919081687927246 ], [ "▁Poetry", -12.91909408569336 ], [ "▁visualize", -12.919109344482422 ], [ "Across", -12.919121742248535 ], [ "▁essentielle", -12.919123649597168 ], [ "beratung", -12.919143676757812 ], [ "▁Guidelines", 
-12.91919231414795 ], [ "▁Fehl", -12.919198036193848 ], [ "▁liberty", -12.91921329498291 ], [ "▁Investigation", -12.91922378540039 ], [ "▁sunrise", -12.919266700744629 ], [ "▁12:00", -12.919541358947754 ], [ "venind", -12.919583320617676 ], [ "▁lotion", -12.919655799865723 ], [ "conscious", -12.91968822479248 ], [ "logists", -12.91973876953125 ], [ "▁judecător", -12.919893264770508 ], [ "▁Ecuador", -12.919928550720215 ], [ "▁ambulance", -12.91994857788086 ], [ "▁Already", -12.920026779174805 ], [ "▁eröffnet", -12.920090675354004 ], [ "▁naval", -12.92010498046875 ], [ "▁imposibil", -12.92011547088623 ], [ "▁Merry", -12.92011833190918 ], [ "▁Duncan", -12.920272827148438 ], [ "▁léger", -12.9203519821167 ], [ "▁delta", -12.920391082763672 ], [ "▁Machinery", -12.920578002929688 ], [ "▁craftsmanship", -12.920766830444336 ], [ "▁angezeigt", -12.9207763671875 ], [ "▁formidable", -12.9207763671875 ], [ "▁Startup", -12.920878410339355 ], [ "venus", -12.920969009399414 ], [ "▁tannin", -12.921019554138184 ], [ "collaborating", -12.921128273010254 ], [ "▁abrupt", -12.921152114868164 ], [ "emergence", -12.921171188354492 ], [ "Dienstleistungen", -12.921197891235352 ], [ "▁liefert", -12.921217918395996 ], [ "engagement", -12.921222686767578 ], [ "▁maximise", -12.921304702758789 ], [ "modeled", -12.9214448928833 ], [ "▁crane", -12.92148208618164 ], [ "▁effortless", -12.921540260314941 ], [ "▁Buffet", -12.92160701751709 ], [ "8000", -12.921648979187012 ], [ "▁Überblick", -12.921687126159668 ], [ "micro", -12.921981811523438 ], [ "▁vergleichen", -12.92204475402832 ], [ "143", -12.922080993652344 ], [ "5.6", -12.922094345092773 ], [ "▁odata", -12.922131538391113 ], [ "▁interviu", -12.922162055969238 ], [ "▁poliţi", -12.922375679016113 ], [ "plated", -12.922383308410645 ], [ "Roman", -12.922406196594238 ], [ "▁satisfactory", -12.922453880310059 ], [ "▁unanimous", -12.922459602355957 ], [ "▁întâln", -12.922464370727539 ], [ "nonsense", -12.922558784484863 ], [ "▁HOW", -12.922616004943848 ], [ "prezinta", -12.922639846801758 ], [ "▁măsura", -12.9226655960083 ], [ "▁Fuji", -12.92275619506836 ], [ "▁Meaning", -12.92278003692627 ], [ "aspiring", -12.922850608825684 ], [ "▁Suceava", -12.922863006591797 ], [ "arba", -12.922983169555664 ], [ "pressive", -12.922988891601562 ], [ "▁creek", -12.92301082611084 ], [ "trakt", -12.923023223876953 ], [ "▁fluffy", -12.923303604125977 ], [ "▁bateau", -12.923371315002441 ], [ "ме", -12.923545837402344 ], [ "UNG", -12.923609733581543 ], [ "motifs", -12.923907279968262 ], [ "Type", -12.923958778381348 ], [ "perçu", -12.924132347106934 ], [ "singurul", -12.924139022827148 ], [ "▁(2011)", -12.92418384552002 ], [ "▁hemp", -12.924263954162598 ], [ "betroffenen", -12.92431640625 ], [ "▁sermon", -12.924369812011719 ], [ "AID", -12.924545288085938 ], [ "3.7", -12.924627304077148 ], [ "▁heiß", -12.92463207244873 ], [ "▁bolnav", -12.924982070922852 ], [ "First", -12.924995422363281 ], [ "▁interrupt", -12.925040245056152 ], [ "phag", -12.925106048583984 ], [ "235", -12.925201416015625 ], [ "▁discoveries", -12.925262451171875 ], [ "▁Wellington", -12.925263404846191 ], [ "▁wechseln", -12.925298690795898 ], [ "▁strategically", -12.925379753112793 ], [ "▁iphone", -12.925440788269043 ], [ "geteilt", -12.925646781921387 ], [ "generative", -12.925748825073242 ], [ "▁Monroe", -12.925806045532227 ], [ "▁Execut", -12.925863265991211 ], [ "▁knitting", -12.925931930541992 ], [ "▁Couple", -12.925939559936523 ], [ "▁Shade", -12.926020622253418 ], [ "▁Taj", -12.926060676574707 ], [ "950", 
-12.926077842712402 ], [ "boiled", -12.92609977722168 ], [ "▁mixes", -12.926130294799805 ], [ "betroffene", -12.926156044006348 ], [ "▁continuation", -12.926169395446777 ], [ "▁begleitet", -12.926226615905762 ], [ "▁numerical", -12.926281929016113 ], [ "▁(2013)", -12.92630386352539 ], [ "▁nourish", -12.926399230957031 ], [ "oricar", -12.926485061645508 ], [ "focus", -12.926486015319824 ], [ "▁Crazy", -12.926651000976562 ], [ "▁ascend", -12.926671028137207 ], [ "▁vinde", -12.926855087280273 ], [ "roar", -12.926874160766602 ], [ "Vac", -12.926929473876953 ], [ "▁Zuschauer", -12.927068710327148 ], [ "izeze", -12.927179336547852 ], [ "▁Mindest", -12.92721939086914 ], [ "lingual", -12.927229881286621 ], [ "▁violet", -12.927264213562012 ], [ "▁Opfer", -12.927299499511719 ], [ "ARS", -12.927431106567383 ], [ "4.7", -12.92744255065918 ], [ "millennial", -12.927492141723633 ], [ "▁striv", -12.927639961242676 ], [ "▁bishop", -12.927680015563965 ], [ "▁Durham", -12.927708625793457 ], [ "opathic", -12.927817344665527 ], [ "Where", -12.927999496459961 ], [ "▁Rider", -12.928030014038086 ], [ "▁Reid", -12.928030967712402 ], [ "stumbled", -12.928156852722168 ], [ "deep", -12.92827320098877 ], [ "▁11:00", -12.928340911865234 ], [ "▁Essex", -12.928380966186523 ], [ "▁Analyst", -12.928397178649902 ], [ "feel", -12.928546905517578 ], [ "▁rave", -12.928601264953613 ], [ "▁Eddie", -12.928631782531738 ], [ "▁communiqué", -12.928756713867188 ], [ "[/", -12.928791046142578 ], [ "▁Tho", -12.929011344909668 ], [ "ffentlichkeit", -12.929019927978516 ], [ "instrument", -12.929126739501953 ], [ "▁metropolitan", -12.929179191589355 ], [ "▁experienţ", -12.929181098937988 ], [ "East", -12.929198265075684 ], [ "Compared", -12.929434776306152 ], [ "worn", -12.929484367370605 ], [ "berufliche", -12.92966365814209 ], [ "▁Umstände", -12.929710388183594 ], [ "individuellen", -12.929901123046875 ], [ "siehe", -12.929912567138672 ], [ "▁sfarsit", -12.929969787597656 ], [ "▁Strength", -12.929999351501465 ], [ "▁prejudice", -12.930024147033691 ], [ "▁shutdown", -12.930159568786621 ], [ "chatting", -12.93022346496582 ], [ "▁Gerne", -12.930227279663086 ], [ "▁Yum", -12.930305480957031 ], [ "▁coastline", -12.930387496948242 ], [ "▁headboard", -12.930623054504395 ], [ "▁politische", -12.930768966674805 ], [ "Sub", -12.930838584899902 ], [ "▁Henderson", -12.930870056152344 ], [ "▁astonishing", -12.930870056152344 ], [ "▁Dresden", -12.930871963500977 ], [ "▁strawberry", -12.93088436126709 ], [ "prenez", -12.930889129638672 ], [ "▁Monaco", -12.930912971496582 ], [ "▁empowered", -12.930953025817871 ], [ "fäl", -12.93109130859375 ], [ "▁creier", -12.931120872497559 ], [ "▁Equ", -12.931300163269043 ], [ "▁Selling", -12.931379318237305 ], [ "▁$35", -12.931483268737793 ], [ "konto", -12.931503295898438 ], [ "▁Procedure", -12.931715965270996 ], [ "▁reduziert", -12.931715965270996 ], [ "▁royalty", -12.931740760803223 ], [ "wyn", -12.931756019592285 ], [ "▁Unfall", -12.932141304016113 ], [ "NAT", -12.932161331176758 ], [ "▁grafic", -12.93251895904541 ], [ "▁Collective", -12.932563781738281 ], [ "▁Computing", -12.932564735412598 ], [ "▁Established", -12.932594299316406 ], [ "▁zest", -12.932598114013672 ], [ "venez", -12.932611465454102 ], [ "follow", -12.9326171875 ], [ "▁Motivation", -12.932640075683594 ], [ "▁dictator", -12.932755470275879 ], [ "whichever", -12.93281078338623 ], [ "▁întâmpl", -12.93293285369873 ], [ "Flüchtling", -12.932987213134766 ], [ "EMI", -12.933015823364258 ], [ "404", -12.933019638061523 ], [ "ICK", -12.93302059173584 ], 
[ "emplacement", -12.933191299438477 ], [ "complete", -12.933349609375 ], [ "advising", -12.933412551879883 ], [ "▁Administrative", -12.933481216430664 ], [ "▁deviation", -12.933496475219727 ], [ "▁experienț", -12.933500289916992 ], [ "lethor", -12.933996200561523 ], [ "▁compress", -12.934081077575684 ], [ "rival", -12.934173583984375 ], [ "reprendre", -12.934186935424805 ], [ "ugi", -12.934266090393066 ], [ "▁Invitation", -12.934267044067383 ], [ "▁retina", -12.934332847595215 ], [ "▁farther", -12.934335708618164 ], [ "▁fenêtre", -12.934799194335938 ], [ "6-7", -12.934815406799316 ], [ "zhou", -12.934834480285645 ], [ "▁Piano", -12.934840202331543 ], [ "▁Congrats", -12.935114860534668 ], [ "▁Configur", -12.935131072998047 ], [ "▁superficial", -12.935179710388184 ], [ "▁melting", -12.935315132141113 ], [ "▁raspunde", -12.935626983642578 ], [ "▁drip", -12.93564224243164 ], [ "östlich", -12.9358491897583 ], [ "189", -12.935925483703613 ], [ "▁Ludwig", -12.935959815979004 ], [ "▁keto", -12.935985565185547 ], [ "▁Bogdan", -12.936013221740723 ], [ "▁contracted", -12.936029434204102 ], [ "▁revive", -12.936100006103516 ], [ "▁cristal", -12.936232566833496 ], [ "▁mailbox", -12.936257362365723 ], [ "președintele", -12.936559677124023 ], [ "▁seekers", -12.936627388000488 ], [ "func", -12.936904907226562 ], [ "▁Markus", -12.93691349029541 ], [ "Unter", -12.936923027038574 ], [ "▁übertragen", -12.937003135681152 ], [ "▁adaptive", -12.937024116516113 ], [ "caster", -12.937051773071289 ], [ "▁geek", -12.937164306640625 ], [ "▁réservation", -12.937236785888672 ], [ "▁irritation", -12.937240600585938 ], [ "▁HDMI", -12.937346458435059 ], [ "Seeing", -12.937485694885254 ], [ "▁genul", -12.937569618225098 ], [ "▁catastrophe", -12.937662124633789 ], [ "▁Tweet", -12.937665939331055 ], [ "TZ", -12.937729835510254 ], [ "▁credible", -12.937946319580078 ], [ "▁cobor", -12.938064575195312 ], [ "▁realizeaz", -12.938159942626953 ], [ "journal", -12.938274383544922 ], [ "▁shaking", -12.938532829284668 ], [ "3-6", -12.938572883605957 ], [ "▁beneficiaz", -12.938605308532715 ], [ "▁Frankreich", -12.938633918762207 ], [ "committing", -12.9386568069458 ], [ "AMS", -12.938835144042969 ], [ "▁Feli", -12.939007759094238 ], [ "▁Producer", -12.939023971557617 ], [ "▁übrig", -12.93940544128418 ], [ "gemeinde", -12.939593315124512 ], [ "should", -12.939799308776855 ], [ "▁neurons", -12.939799308776855 ], [ "▁Agenda", -12.939833641052246 ], [ "▁hashtag", -12.939896583557129 ], [ "▁confortabil", -12.939897537231445 ], [ "520", -12.940008163452148 ], [ "bonded", -12.940033912658691 ], [ "▁următoare", -12.940191268920898 ], [ "▁volatile", -12.940223693847656 ], [ "infamous", -12.940225601196289 ], [ "seară", -12.940229415893555 ], [ "▁Sorge", -12.940346717834473 ], [ "▁Beiträge", -12.940420150756836 ], [ "▁îndeplin", -12.940449714660645 ], [ "gespräch", -12.940649032592773 ], [ "▁joueur", -12.940701484680176 ], [ "▁outsourcing", -12.940701484680176 ], [ "▁Guvernul", -12.940814018249512 ], [ "6-2", -12.940818786621094 ], [ "▁prioritize", -12.941068649291992 ], [ "▁duminică", -12.941076278686523 ], [ "▁resignation", -12.941076278686523 ], [ "▁Converter", -12.941079139709473 ], [ "hereby", -12.941155433654785 ], [ "▁stresses", -12.941299438476562 ], [ "▁brun", -12.941415786743164 ], [ "▁elev", -12.941423416137695 ], [ "▁Skip", -12.941479682922363 ], [ "540", -12.941499710083008 ], [ "TURE", -12.941603660583496 ], [ "▁Lynch", -12.941635131835938 ], [ "▁preveni", -12.941643714904785 ], [ "compatible", -12.941692352294922 ], [ "surveyed", 
-12.941702842712402 ], [ "▁Ausnahme", -12.941713333129883 ], [ "▁medicul", -12.941812515258789 ], [ "▁subtil", -12.941865921020508 ], [ "▁Quali", -12.941890716552734 ], [ "▁techno", -12.941900253295898 ], [ "presently", -12.94193172454834 ], [ "▁Müller", -12.941934585571289 ], [ "DIRECT", -12.941937446594238 ], [ "schuld", -12.941944122314453 ], [ "▁Bloomberg", -12.941994667053223 ], [ "feuer", -12.942181587219238 ], [ "▁Pharmacy", -12.942270278930664 ], [ "▁Schnitt", -12.942301750183105 ], [ "186", -12.942333221435547 ], [ "peaks", -12.942355155944824 ], [ "▁Gemeinsam", -12.94235897064209 ], [ "▁récemment", -12.94235897064209 ], [ "▁Pascal", -12.942490577697754 ], [ "filmed", -12.942523956298828 ], [ "RCA", -12.942548751831055 ], [ "▁virtuelle", -12.942622184753418 ], [ "▁dotat", -12.942630767822266 ], [ "logisch", -12.942717552185059 ], [ "▁Luck", -12.943005561828613 ], [ "cosy", -12.943132400512695 ], [ "▁Awareness", -12.943216323852539 ], [ "▁gesetzlich", -12.943263053894043 ], [ "padded", -12.943306922912598 ], [ "▁Lotus", -12.943395614624023 ], [ "urging", -12.9434175491333 ], [ "▁mushroom", -12.943426132202148 ], [ "▁adultes", -12.943527221679688 ], [ "▁Coca", -12.943571090698242 ], [ "▁recev", -12.943586349487305 ], [ "▁mantra", -12.943610191345215 ], [ "▁practise", -12.943644523620605 ], [ "▁acceler", -12.943663597106934 ], [ "bolster", -12.943756103515625 ], [ "▁compressed", -12.943818092346191 ], [ "TIN", -12.943899154663086 ], [ "▁aromatic", -12.944236755371094 ], [ "geleitet", -12.944408416748047 ], [ "▁fibr", -12.944443702697754 ], [ "exécut", -12.94444751739502 ], [ "▁unconscious", -12.94456958770752 ], [ "HAR", -12.944607734680176 ], [ "▁Gregory", -12.944661140441895 ], [ "▁Manila", -12.944738388061523 ], [ "ozitate", -12.944756507873535 ], [ "exemplary", -12.944803237915039 ], [ "éventuel", -12.944906234741211 ], [ "▁Craciun", -12.944930076599121 ], [ "▁tehnologii", -12.944931030273438 ], [ "▁Despre", -12.945138931274414 ], [ "▁1917", -12.945141792297363 ], [ "▁upfront", -12.945146560668945 ], [ "▁Iulia", -12.945280075073242 ], [ "▁erwähnt", -12.945359230041504 ], [ "▁magnesium", -12.945359230041504 ], [ "▁descriptive", -12.94536304473877 ], [ "▁consumul", -12.945364952087402 ], [ "▁10-15", -12.945423126220703 ], [ "▁erfüllen", -12.945611953735352 ], [ "gig", -12.945657730102539 ], [ "430", -12.945765495300293 ], [ "▁Migration", -12.945789337158203 ], [ "bră", -12.94579029083252 ], [ "▁réforme", -12.945863723754883 ], [ "▁york", -12.94610595703125 ], [ "dritten", -12.946109771728516 ], [ "cumva", -12.946182250976562 ], [ "▁Alumni", -12.946218490600586 ], [ "▁Ceramic", -12.946222305297852 ], [ "▁rappelle", -12.946236610412598 ], [ "▁pianist", -12.946248054504395 ], [ "twisted", -12.946306228637695 ], [ "earned", -12.946432113647461 ], [ "▁Hose", -12.946514129638672 ], [ "156", -12.946610450744629 ], [ "▁Salmon", -12.946687698364258 ], [ "Level", -12.946913719177246 ], [ "▁swirl", -12.947052001953125 ], [ "erfahrung", -12.947061538696289 ], [ "▁liabilities", -12.947078704833984 ], [ "praxis", -12.9470853805542 ], [ "IPO", -12.947089195251465 ], [ "▁screaming", -12.947092056274414 ], [ "emphasized", -12.947200775146484 ], [ "DEA", -12.947260856628418 ], [ "▁dermatolog", -12.947351455688477 ], [ "▁pacate", -12.947498321533203 ], [ "▁ansamblu", -12.947507858276367 ], [ "▁beteiligt", -12.947509765625 ], [ "▁Needles", -12.947574615478516 ], [ "▁organisiert", -12.947607040405273 ], [ "Pacific", -12.947639465332031 ], [ "actual", -12.947823524475098 ], [ "prindere", 
-12.94801139831543 ], [ "▁Indoor", -12.948348045349121 ], [ "▁Gewalt", -12.948431015014648 ], [ "▁rezid", -12.948507308959961 ], [ "censor", -12.948522567749023 ], [ "▁unlawful", -12.94882869720459 ], [ "▁Explain", -12.948873519897461 ], [ "▁Flame", -12.948897361755371 ], [ "▁brachte", -12.948941230773926 ], [ "▁Mustang", -12.94899845123291 ], [ "ectomy", -12.949044227600098 ], [ "▁deliberate", -12.949064254760742 ], [ "▁sparkle", -12.949225425720215 ], [ "▁inchis", -12.94926929473877 ], [ "▁Cristian", -12.949289321899414 ], [ "▁facture", -12.949291229248047 ], [ "▁Grundstück", -12.949292182922363 ], [ "außerhalb", -12.949300765991211 ], [ "coast", -12.949321746826172 ], [ "anilor", -12.949396133422852 ], [ "255", -12.94952392578125 ], [ "nterdisciplinary", -12.949576377868652 ], [ "▁Isabel", -12.949655532836914 ], [ "▁Städte", -12.949701309204102 ], [ "▁cicl", -12.949837684631348 ], [ "▁Zeug", -12.949905395507812 ], [ "▁Muskel", -12.949951171875 ], [ "▁indirectly", -12.950051307678223 ], [ "▁Vorbereitung", -12.950093269348145 ], [ "MMA", -12.95012378692627 ], [ "▁pudding", -12.950197219848633 ], [ "rax", -12.950389862060547 ], [ "▁Stimmung", -12.95052433013916 ], [ "▁hierarchy", -12.95052433013916 ], [ "partie", -12.950597763061523 ], [ "▁elevate", -12.950685501098633 ], [ "▁Persian", -12.950690269470215 ], [ "forensic", -12.95077896118164 ], [ "Become", -12.950854301452637 ], [ "leicht", -12.9508695602417 ], [ "▁staging", -12.950942039489746 ], [ "▁fühlt", -12.950965881347656 ], [ "fenster", -12.950979232788086 ], [ "▁unbelievable", -12.951089859008789 ], [ "„", -12.951260566711426 ], [ "▁Guatemala", -12.951387405395508 ], [ "LET", -12.95141315460205 ], [ "▁buff", -12.951454162597656 ], [ "▁Primul", -12.951626777648926 ], [ "▁mainland", -12.951702117919922 ], [ "campus", -12.951923370361328 ], [ "▁gefällt", -12.952075958251953 ], [ "BAN", -12.952153205871582 ], [ "finish", -12.952229499816895 ], [ "accustomed", -12.952251434326172 ], [ "▁Businesses", -12.95234203338623 ], [ "▁întreb", -12.95239543914795 ], [ "▁recomandă", -12.952425956726074 ], [ "▁pellet", -12.952474594116211 ], [ "▁GST", -12.952507972717285 ], [ "SEA", -12.952601432800293 ], [ "▁categorie", -12.952631950378418 ], [ "▁convainc", -12.95268440246582 ], [ "▁considéré", -12.952739715576172 ], [ "rois", -12.952853202819824 ], [ "▁thrust", -12.952898979187012 ], [ "ijk", -12.953001022338867 ], [ "gefüllt", -12.953118324279785 ], [ "▁situatii", -12.953327178955078 ], [ "▁Jacksonville", -12.95337200164795 ], [ "▁bakery", -12.953473091125488 ], [ "▁Accident", -12.953554153442383 ], [ "▁urmeaza", -12.953572273254395 ], [ "▁crib", -12.953593254089355 ], [ "getroffen", -12.953707695007324 ], [ "Based", -12.953877449035645 ], [ "Including", -12.95398235321045 ], [ "▁Morocco", -12.95398235321045 ], [ "▁casserole", -12.95398235321045 ], [ "▁enquiry", -12.953983306884766 ], [ "▁pahar", -12.954017639160156 ], [ "▁Unternehmer", -12.954025268554688 ], [ "électro", -12.954068183898926 ], [ "Marie", -12.95413589477539 ], [ "▁Sno", -12.954153060913086 ], [ "▁prostate", -12.954168319702148 ], [ "▁Wallace", -12.95426082611084 ], [ "empre", -12.954402923583984 ], [ "▁Multumesc", -12.954415321350098 ], [ "White", -12.954675674438477 ], [ "brief", -12.954751014709473 ], [ "▁kitten", -12.954751014709473 ], [ "füh", -12.954780578613281 ], [ "▁mankind", -12.954821586608887 ], [ "ENE", -12.95483112335205 ], [ "▁Ethics", -12.954848289489746 ], [ "▁Realty", -12.954946517944336 ], [ "▁Emerg", -12.954988479614258 ], [ "7-8", -12.955055236816406 ], [ 
"museum", -12.955096244812012 ], [ "BRE", -12.95518970489502 ], [ "▁kilometri", -12.955282211303711 ], [ "oyaume", -12.955286026000977 ], [ "▁Cambodia", -12.955288887023926 ], [ "▁bruit", -12.955304145812988 ], [ "▁sépar", -12.955334663391113 ], [ "mastered", -12.9554443359375 ], [ "shake", -12.955608367919922 ], [ "▁liaison", -12.955718994140625 ], [ "▁Boulder", -12.955719947814941 ], [ "▁tortilla", -12.955720901489258 ], [ "▁Fokus", -12.955731391906738 ], [ "▁Blair", -12.95573902130127 ], [ "▁disturbance", -12.955775260925293 ], [ "geladen", -12.955843925476074 ], [ "▁sunscreen", -12.955886840820312 ], [ "▁reuș", -12.955896377563477 ], [ "▁Braun", -12.956155776977539 ], [ "▁existente", -12.956157684326172 ], [ "stift", -12.956242561340332 ], [ "▁preot", -12.956387519836426 ], [ "▁doved", -12.956445693969727 ], [ "sexual", -12.956488609313965 ], [ "meanwhile", -12.956583976745605 ], [ "▁legislature", -12.956583976745605 ], [ "▁vermeiden", -12.956583976745605 ], [ "▁inequality", -12.95687484741211 ], [ "▁turc", -12.956881523132324 ], [ "ви", -12.95698070526123 ], [ "▁Kontrolle", -12.95702075958252 ], [ "▁Ursache", -12.95704174041748 ], [ "▁confess", -12.95704174041748 ], [ "▁poetic", -12.957109451293945 ], [ "attention", -12.957236289978027 ], [ "textured", -12.957386016845703 ], [ "GES", -12.957586288452148 ], [ "6-4", -12.957637786865234 ], [ "Ray", -12.957696914672852 ], [ "chromat", -12.957745552062988 ], [ "▁insightful", -12.957775115966797 ], [ "▁Navigation", -12.957887649536133 ], [ "▁destiny", -12.957887649536133 ], [ "▁ergeben", -12.957892417907715 ], [ "▁versteh", -12.958090782165527 ], [ "301", -12.958209037780762 ], [ "▁Exterior", -12.958321571350098 ], [ "église", -12.958322525024414 ], [ "▁Failure", -12.958322525024414 ], [ "▁Patricia", -12.958324432373047 ], [ "▁geschützt", -12.958328247070312 ], [ "intrarea", -12.95833969116211 ], [ "▁Forward", -12.958368301391602 ], [ "▁Portrait", -12.95844841003418 ], [ "▁enregistré", -12.958480834960938 ], [ "▁wagon", -12.958620071411133 ], [ "stealing", -12.958879470825195 ], [ "▁Numero", -12.958880424499512 ], [ "▁tradui", -12.958986282348633 ], [ "▁klassische", -12.959033966064453 ], [ "▁profitieren", -12.959043502807617 ], [ "▁laboratories", -12.95919132232666 ], [ "▁reconnaissance", -12.95919132232666 ], [ "ку", -12.959314346313477 ], [ "▁Petersburg", -12.959359169006348 ], [ "▁fertility", -12.959421157836914 ], [ "▁Understand", -12.959516525268555 ], [ "dehors", -12.959746360778809 ], [ "▁Knox", -12.959762573242188 ], [ "software", -12.959797859191895 ], [ "▁Celebration", -12.959823608398438 ], [ "4.6", -12.959897994995117 ], [ "quino", -12.959930419921875 ], [ "▁endeavour", -12.960073471069336 ], [ "▁temptation", -12.960136413574219 ], [ "▁Registry", -12.96035385131836 ], [ "IMP", -12.960502624511719 ], [ "bedingt", -12.960625648498535 ], [ "▁$60", -12.960846900939941 ], [ "▁Kriterien", -12.96093463897705 ], [ "▁strawberries", -12.960943222045898 ], [ "▁conspiracy", -12.96094799041748 ], [ "▁pouch", -12.960976600646973 ], [ "▁Alexandria", -12.961017608642578 ], [ "▁Mick", -12.961102485656738 ], [ "extra", -12.961114883422852 ], [ "▁Operator", -12.961151123046875 ], [ "enduring", -12.96132755279541 ], [ "▁smash", -12.961359024047852 ], [ "Euro", -12.961360931396484 ], [ "▁Nouvelle", -12.961370468139648 ], [ "▁Raspberry", -12.961370468139648 ], [ "▁präsentieren", -12.961380004882812 ], [ "▁electrician", -12.961404800415039 ], [ "▁cheerful", -12.961472511291504 ], [ "▁chargé", -12.961508750915527 ], [ "▁Diskussion", 
-12.961511611938477 ], [ "▁surpass", -12.961604118347168 ], [ "▁Acces", -12.961701393127441 ], [ "tausend", -12.961771011352539 ], [ "▁vigorous", -12.961808204650879 ], [ "▁tava", -12.961810111999512 ], [ "CHO", -12.96193790435791 ], [ "▁1951", -12.961941719055176 ], [ "▁Umsatz", -12.962019920349121 ], [ "▁slavery", -12.962055206298828 ], [ "travel", -12.962294578552246 ], [ "▁correspondent", -12.962297439575195 ], [ "▁$150", -12.962307929992676 ], [ "▁stärker", -12.962594985961914 ], [ "Alb", -12.96264362335205 ], [ "▁Lopez", -12.962682723999023 ], [ "▁longueur", -12.962767601013184 ], [ "▁successive", -12.962772369384766 ], [ "▁(2015)", -12.96278190612793 ], [ "teig", -12.962790489196777 ], [ "custom", -12.962944984436035 ], [ "TIM", -12.963099479675293 ], [ "▁Escape", -12.963174819946289 ], [ "▁Sekunden", -12.963349342346191 ], [ "tiré", -12.963444709777832 ], [ "▁chantier", -12.963489532470703 ], [ "▁saturated", -12.963555335998535 ], [ "▁confrontation", -12.963804244995117 ], [ "▁biography", -12.963805198669434 ], [ "zuerst", -12.9639892578125 ], [ "▁rencontré", -12.963991165161133 ], [ "▁harmless", -12.96412181854248 ], [ "Branche", -12.964139938354492 ], [ "▁QR", -12.964380264282227 ], [ "▁Ereignis", -12.964430809020996 ], [ "▁verkaufen", -12.96444320678711 ], [ "0:00", -12.96451187133789 ], [ "Association", -12.96469783782959 ], [ "▁Santiago", -12.964865684509277 ], [ "Control", -12.964993476867676 ], [ "▁Angriff", -12.9650297164917 ], [ "lase", -12.96505069732666 ], [ "▁sfaturi", -12.965224266052246 ], [ "▁Comprehensive", -12.965304374694824 ], [ "▁Shepherd", -12.965304374694824 ], [ "▁exponential", -12.965304374694824 ], [ "▁penetration", -12.965304374694824 ], [ "▁comble", -12.965394973754883 ], [ "ionar", -12.965557098388672 ], [ "slept", -12.965563774108887 ], [ "▁Spice", -12.965633392333984 ], [ "mAh", -12.965688705444336 ], [ "▁Vertreter", -12.965747833251953 ], [ "fehler", -12.965752601623535 ], [ "▁Scroll", -12.96599292755127 ], [ "▁WARRANT", -12.966179847717285 ], [ "▁minimise", -12.966326713562012 ], [ "▁Dept", -12.966474533081055 ], [ "▁urinar", -12.96661376953125 ], [ "établir", -12.966619491577148 ], [ "verhältnis", -12.966713905334473 ], [ "▁glowing", -12.966979026794434 ], [ "kulturelle", -12.966984748840332 ], [ "▁Pediatric", -12.967057228088379 ], [ "▁inconvenience", -12.967057228088379 ], [ "Antoine", -12.967121124267578 ], [ "▁Heck", -12.967164993286133 ], [ "▁couches", -12.967265129089355 ], [ "▁1938", -12.967331886291504 ], [ "maybe", -12.967333793640137 ], [ "ETA", -12.9673433303833 ], [ "▁solaire", -12.96748161315918 ], [ "▁Zürich", -12.967495918273926 ], [ "computer", -12.967545509338379 ], [ "milk", -12.96756362915039 ], [ "он", -12.967585563659668 ], [ "modalitate", -12.967608451843262 ], [ "spanning", -12.967655181884766 ], [ "▁Crypto", -12.96774959564209 ], [ "▁Spotify", -12.967935562133789 ], [ "mycin", -12.967944145202637 ], [ "▁similarities", -12.96811294555664 ], [ "▁eclipse", -12.968377113342285 ], [ "Map", -12.968610763549805 ], [ "double", -12.96861743927002 ], [ "corporate", -12.968734741210938 ], [ "▁Hindi", -12.968853950500488 ], [ "battling", -12.968866348266602 ], [ "▁habituel", -12.969098091125488 ], [ "▁Transition", -12.969196319580078 ], [ "▁luptă", -12.96920394897461 ], [ "▁trainee", -12.969219207763672 ], [ "LIS", -12.96922492980957 ], [ "▁Vatican", -12.969254493713379 ], [ "Archived", -12.9692964553833 ], [ "Connect", -12.969305038452148 ], [ "▁prealabil", -12.969307899475098 ], [ "▁Chambre", -12.969327926635742 ], [ "stuhl", 
-12.969440460205078 ], [ "▁arrivé", -12.969557762145996 ], [ "▁Urteil", -12.969575881958008 ], [ "▁scrutiny", -12.969818115234375 ], [ "▁memoir", -12.969854354858398 ], [ "▁innovant", -12.9699068069458 ], [ "▁sublime", -12.969943046569824 ], [ "children", -12.970004081726074 ], [ "▁Handwerk", -12.970056533813477 ], [ "▁campuses", -12.970268249511719 ], [ "▁durabil", -12.970502853393555 ], [ "▁immersive", -12.970632553100586 ], [ "▁Magnet", -12.970732688903809 ], [ "läufe", -12.970808029174805 ], [ "▁Techno", -12.970837593078613 ], [ "MAP", -12.9710693359375 ], [ "7.2", -12.971145629882812 ], [ "▁Schwimm", -12.971181869506836 ], [ "BOOK", -12.971186637878418 ], [ "188", -12.971441268920898 ], [ "▁Supervisor", -12.971498489379883 ], [ "prévue", -12.971691131591797 ], [ "needed", -12.971813201904297 ], [ "▁creditors", -12.971822738647461 ], [ "▁brin", -12.971837043762207 ], [ "▁Neck", -12.971900939941406 ], [ "▁Salut", -12.971988677978516 ], [ "▁despair", -12.972105979919434 ], [ "▁Sauce", -12.972261428833008 ], [ "▁Westminster", -12.972335815429688 ], [ "▁langfristig", -12.972335815429688 ], [ "▁northeast", -12.972365379333496 ], [ "▁încercat", -12.972399711608887 ], [ "▁nausea", -12.972408294677734 ], [ "▁Paypal", -12.972440719604492 ], [ "▁Arrow", -12.972469329833984 ], [ "▁Travis", -12.972633361816406 ], [ "(2009)", -12.972713470458984 ], [ "▁Rising", -12.972719192504883 ], [ "termes", -12.973097801208496 ], [ "Australie", -12.973154067993164 ], [ "▁scarf", -12.973187446594238 ], [ "klassischen", -12.97337818145752 ], [ "▁boug", -12.973466873168945 ], [ "DOT", -12.97360610961914 ], [ "▁Trink", -12.97361946105957 ], [ "▁bestätigt", -12.97365951538086 ], [ "▁officiel", -12.97370433807373 ], [ "Produkt", -12.973873138427734 ], [ "DNA", -12.974140167236328 ], [ "▁*******", -12.97426700592041 ], [ "GAR", -12.974271774291992 ], [ "therapeut", -12.974377632141113 ], [ "187", -12.974420547485352 ], [ "▁Louisville", -12.974493026733398 ], [ "▁geöffnet", -12.97462272644043 ], [ "Watch", -12.974640846252441 ], [ "85%", -12.974678993225098 ], [ "▁Candida", -12.974698066711426 ], [ "▁Kathy", -12.974703788757324 ], [ "▁Animation", -12.974711418151855 ], [ "planung", -12.974715232849121 ], [ "woche", -12.974730491638184 ], [ "Video", -12.974966049194336 ], [ "▁Automation", -12.97507095336914 ], [ "▁foliage", -12.97507381439209 ], [ "▁evenimentului", -12.975175857543945 ], [ "SEN", -12.975362777709961 ], [ "▁Dialog", -12.975372314453125 ], [ "▁ZIP", -12.975372314453125 ], [ "▁vieții", -12.97537612915039 ], [ "▁passionné", -12.975425720214844 ], [ "▁WOW", -12.97544002532959 ], [ "ectiv", -12.975464820861816 ], [ "▁vorbesc", -12.975482940673828 ], [ "▁computational", -12.975533485412598 ], [ "▁idiot", -12.97557258605957 ], [ "▁stigma", -12.97567081451416 ], [ "▁multumesc", -12.975870132446289 ], [ "▁sărbători", -12.975870132446289 ], [ "▁Advantage", -12.975906372070312 ], [ "▁alegeri", -12.976024627685547 ], [ "▁philosopher", -12.976031303405762 ], [ "RIE", -12.976117134094238 ], [ "refundable", -12.976221084594727 ], [ "▁Sofia", -12.97623348236084 ], [ "▁încheiat", -12.976313591003418 ], [ "meilleures", -12.976473808288574 ], [ "critical", -12.976744651794434 ], [ "▁cavity", -12.976766586303711 ], [ "▁ressort", -12.976792335510254 ], [ "strong", -12.976798057556152 ], [ "▁Backup", -12.976948738098145 ], [ "▁Zeitraum", -12.977023124694824 ], [ "▁Szene", -12.977027893066406 ], [ "▁Candle", -12.977173805236816 ], [ "▁ciocolat", -12.977198600769043 ], [ "etched", -12.977227210998535 ], [ "ан", 
-12.977302551269531 ], [ "▁Anchor", -12.977365493774414 ], [ "equate", -12.977470397949219 ], [ "▁bulg", -12.977476119995117 ], [ "▁motorist", -12.977524757385254 ], [ "träglich", -12.977736473083496 ], [ "please", -12.977936744689941 ], [ "different", -12.978011131286621 ], [ "▁Accel", -12.97813606262207 ], [ "Proiectul", -12.97829818725586 ], [ "▁cabbage", -12.97852897644043 ], [ "▁télécharger", -12.97852897644043 ], [ "▁Presentation", -12.97856330871582 ], [ "▁Struktur", -12.978621482849121 ], [ "bücher", -12.978650093078613 ], [ "▁flatter", -12.978672981262207 ], [ "emprunt", -12.979074478149414 ], [ "▁oriental", -12.979111671447754 ], [ "▁Turnier", -12.979166984558105 ], [ "brücke", -12.97917366027832 ], [ "▁légumes", -12.979416847229004 ], [ "gerechnet", -12.979595184326172 ], [ "flooded", -12.979621887207031 ], [ "LER", -12.979679107666016 ], [ "üben", -12.97973918914795 ], [ "internaute", -12.979888916015625 ], [ "▁Austausch", -12.979935646057129 ], [ "gefordert", -12.980034828186035 ], [ "▁adoptat", -12.980277061462402 ], [ "▁erinnern", -12.980305671691895 ], [ "▁dolphin", -12.980307579040527 ], [ "▁Parkinson", -12.980308532714844 ], [ "büro", -12.980310440063477 ], [ "▁Crest", -12.980368614196777 ], [ "▁Ikea", -12.980437278747559 ], [ "▁ecologic", -12.980470657348633 ], [ "mplă", -12.98065185546875 ], [ "▁șef", -12.980655670166016 ], [ "coop", -12.980868339538574 ], [ "▁Carson", -12.980900764465332 ], [ "▁uşor", -12.981054306030273 ], [ "▁exert", -12.981070518493652 ], [ "▁countertop", -12.981114387512207 ], [ "ntended", -12.981136322021484 ], [ "▁Civic", -12.981313705444336 ], [ "▁attentes", -12.98133373260498 ], [ "gesetzlichen", -12.981356620788574 ], [ "frischen", -12.981475830078125 ], [ "▁Bottle", -12.981636047363281 ], [ "▁cautare", -12.982080459594727 ], [ "▁waterfront", -12.982226371765137 ], [ "▁centerpiece", -12.982312202453613 ], [ "▁Castel", -12.982441902160645 ], [ "510", -12.98270034790039 ], [ "capped", -12.982709884643555 ], [ "▁mattresses", -12.982850074768066 ], [ "▁readiness", -12.982865333557129 ], [ "diag", -12.982970237731934 ], [ "▁geändert", -12.982980728149414 ], [ "▁complained", -12.983051300048828 ], [ "▁diary", -12.983073234558105 ], [ "▁ceremonies", -12.983144760131836 ], [ "▁următor", -12.983181953430176 ], [ "▁Engel", -12.983270645141602 ], [ "▁disconnect", -12.9832763671875 ], [ "▁Silvi", -12.983282089233398 ], [ "▁eingerichtet", -12.9834566116333 ], [ "medizin", -12.983512878417969 ], [ "▁majestic", -12.983869552612305 ], [ "▁Random", -12.983943939208984 ], [ "▁Equity", -12.984046936035156 ], [ "▁Echipa", -12.984111785888672 ], [ "са", -12.984163284301758 ], [ "316", -12.984179496765137 ], [ "▁Formation", -12.984183311462402 ], [ "inland", -12.98421859741211 ], [ "appuy", -12.984301567077637 ], [ "TAN", -12.984481811523438 ], [ "slipped", -12.984918594360352 ], [ "Certains", -12.985247611999512 ], [ "▁Silber", -12.98525333404541 ], [ "▁reçoi", -12.985257148742676 ], [ "▁Monthly", -12.985323905944824 ], [ "calculating", -12.985494613647461 ], [ "▁scratches", -12.98554515838623 ], [ "▁concurrence", -12.985654830932617 ], [ "▁Stärke", -12.985662460327148 ], [ "▁intermediar", -12.985751152038574 ], [ "▁erlebt", -12.98579216003418 ], [ "gesellschaftlich", -12.986037254333496 ], [ "▁Volk", -12.986041069030762 ], [ "▁Ansprüche", -12.986101150512695 ], [ "▁cumulative", -12.986103057861328 ], [ "▁Randy", -12.986183166503906 ], [ "▁instituții", -12.98622989654541 ], [ "together", -12.986489295959473 ], [ "▁Sap", -12.986539840698242 ], [ "▁modificari", 
-12.986551284790039 ], [ "▁erosion", -12.986572265625 ], [ "▁wicked", -12.986577033996582 ], [ "soaked", -12.986613273620605 ], [ "▁cellar", -12.9866361618042 ], [ "ignoring", -12.986726760864258 ], [ "▁scarce", -12.986815452575684 ], [ "ueuse", -12.98697280883789 ], [ "▁bibliothèque", -12.986995697021484 ], [ "critères", -12.987017631530762 ], [ "▁overlay", -12.987166404724121 ], [ "IPA", -12.98737907409668 ], [ "director", -12.987393379211426 ], [ "▁Krishna", -12.987444877624512 ], [ "▁methodologies", -12.987451553344727 ], [ "iocese", -12.987513542175293 ], [ "▁saucepan", -12.987713813781738 ], [ "184", -12.987948417663574 ], [ "275", -12.987981796264648 ], [ "▁précieu", -12.988165855407715 ], [ "▁academy", -12.9883394241333 ], [ "460", -12.988438606262207 ], [ "ERN", -12.988679885864258 ], [ "▁emoti", -12.988725662231445 ], [ "▁télévision", -12.988823890686035 ], [ "EDIT", -12.988901138305664 ], [ "▁Valeri", -12.989045143127441 ], [ "▁Charity", -12.98911190032959 ], [ "Voilà", -12.989297866821289 ], [ "▁lipsit", -12.989356994628906 ], [ "▁unleash", -12.989373207092285 ], [ "▁suferit", -12.989506721496582 ], [ "▁Lifestyle", -12.98953914642334 ], [ "▁Edel", -12.989603996276855 ], [ "▁Derek", -12.989643096923828 ], [ "▁Manga", -12.989801406860352 ], [ "▁increment", -12.989990234375 ], [ "▁plötzlich", -12.990133285522461 ], [ "▁5:30", -12.990208625793457 ], [ "▁Republicii", -12.990246772766113 ], [ "▁capitalism", -12.990293502807617 ], [ "ROW", -12.990510940551758 ], [ "▁Paar", -12.990523338317871 ], [ "allée", -12.99057674407959 ], [ "▁motto", -12.990610122680664 ], [ "Schäden", -12.990630149841309 ], [ "▁£10", -12.99063491821289 ], [ "RIP", -12.990728378295898 ], [ "courir", -12.990761756896973 ], [ "rocky", -12.990944862365723 ], [ "▁Sunshine", -12.991031646728516 ], [ "▁chimney", -12.991044998168945 ], [ "▁préfér", -12.991153717041016 ], [ "▁relaxare", -12.991189956665039 ], [ "▁colabora", -12.99134349822998 ], [ "liefer", -12.99142837524414 ], [ "▁ordentlich", -12.991486549377441 ], [ "▁dauerhaft", -12.991535186767578 ], [ "kammer", -12.991572380065918 ], [ "▁Basket", -12.991579055786133 ], [ "Site", -12.991657257080078 ], [ "▁Regina", -12.991716384887695 ], [ "▁simulate", -12.991868019104004 ], [ "▁wrestle", -12.991939544677734 ], [ "wertig", -12.991986274719238 ], [ "▁Christie", -12.992018699645996 ], [ "download", -12.992033004760742 ], [ "▁torch", -12.992213249206543 ], [ "riya", -12.992216110229492 ], [ "▁Grie", -12.992247581481934 ], [ "bitten", -12.992356300354004 ], [ "▁spezialisiert", -12.99238109588623 ], [ "▁Parade", -12.992408752441406 ], [ "▁migraine", -12.992830276489258 ], [ "▁Armstrong", -12.992846488952637 ], [ "▁cutie", -12.9928560256958 ], [ "▁bullying", -12.992889404296875 ], [ "▁Estonia", -12.99293041229248 ], [ "▁harvested", -12.992948532104492 ], [ "▁Hunger", -12.992971420288086 ], [ "▁frapp", -12.992999076843262 ], [ "REM", -12.993117332458496 ], [ "sensor", -12.993189811706543 ], [ "▁GREAT", -12.993293762207031 ], [ "▁thyroid", -12.993302345275879 ], [ "▁mărturi", -12.993335723876953 ], [ "ocupă", -12.993809700012207 ], [ "▁Wealth", -12.993812561035156 ], [ "▁convins", -12.993841171264648 ], [ "141", -12.993876457214355 ], [ "▁vingt", -12.993901252746582 ], [ "▁revel", -12.994054794311523 ], [ "▁Adri", -12.994083404541016 ], [ "▁remix", -12.994207382202148 ], [ "▁fermentation", -12.99425220489502 ], [ "▁achiziti", -12.994352340698242 ], [ "dream", -12.994426727294922 ], [ "▁contemporan", -12.994632720947266 ], [ "▁youngsters", -12.994685173034668 ], [ 
"▁Hartford", -12.994745254516602 ], [ "▁Wagen", -12.994988441467285 ], [ "▁Celebr", -12.995214462280273 ], [ "leveraging", -12.99527645111084 ], [ "▁Iasi", -12.99549674987793 ], [ "tackling", -12.9955415725708 ], [ "▁intrinsic", -12.995553970336914 ], [ "▁Macedon", -12.995603561401367 ], [ "NIA", -12.995784759521484 ], [ "▁bliss", -12.995905876159668 ], [ "▁gradual", -12.995908737182617 ], [ "▁inregistrat", -12.995981216430664 ], [ "▁volleyball", -12.995986938476562 ], [ "▁offiziell", -12.996054649353027 ], [ "▁carré", -12.99611759185791 ], [ "Mostly", -12.996174812316895 ], [ "▁Harley", -12.996193885803223 ], [ "▁locati", -12.996216773986816 ], [ "▁Klo", -12.996223449707031 ], [ "▁Equal", -12.996238708496094 ], [ "▁citat", -12.996369361877441 ], [ "▁argint", -12.996478080749512 ], [ "prüft", -12.996528625488281 ], [ "▁Fence", -12.996600151062012 ], [ "positive", -12.996988296508789 ], [ "▁Kaz", -12.997245788574219 ], [ "▁distortion", -12.997342109680176 ], [ "▁sâmbătă", -12.997342109680176 ], [ "▁frontière", -12.997346878051758 ], [ "▁revanch", -12.997394561767578 ], [ "▁Held", -12.997465133666992 ], [ "▁Hobb", -12.99776554107666 ], [ "▁reuşit", -12.997796058654785 ], [ "deem", -12.997880935668945 ], [ "▁dorint", -12.997902870178223 ], [ "▁Anlagen", -12.997908592224121 ], [ "▁cheval", -12.997973442077637 ], [ "630", -12.99806022644043 ], [ "▁implementare", -12.99808406829834 ], [ "▁curator", -12.99821662902832 ], [ "▁legislator", -12.998247146606445 ], [ "▁potassium", -12.998247146606445 ], [ "▁veterinarian", -12.998247146606445 ], [ "▁domenii", -12.998273849487305 ], [ "▁revue", -12.998310089111328 ], [ "Vielen", -12.998333930969238 ], [ "africain", -12.998570442199707 ], [ "before", -12.998680114746094 ], [ "▁Bestandteil", -12.998702049255371 ], [ "▁(2010)", -12.998767852783203 ], [ "▁Arlington", -12.999153137207031 ], [ "▁Gründung", -12.999153137207031 ], [ "▁Sprinkle", -12.999153137207031 ], [ "▁Princeton", -12.999186515808105 ], [ "chirurg", -12.999228477478027 ], [ "▁laissé", -12.999357223510742 ], [ "whoever", -12.999384880065918 ], [ "▁pasture", -12.999431610107422 ], [ "ajute", -12.999436378479004 ], [ "▁joyful", -12.999494552612305 ], [ "etapa", -12.999905586242676 ], [ "ESP", -13.000017166137695 ], [ "▁Iohannis", -13.000059127807617 ], [ "▁10:30", -13.000127792358398 ], [ "▁Kingston", -13.000140190124512 ], [ "▁contender", -13.000164031982422 ], [ "▁Damage", -13.000177383422852 ], [ "▁schreibt", -13.000482559204102 ], [ "sstisch", -13.000631332397461 ], [ "Associated", -13.00072956085205 ], [ "▁disposable", -13.000782012939453 ], [ "veranstaltung", -13.00096607208252 ], [ "▁puppet", -13.00100040435791 ], [ "pong", -13.001093864440918 ], [ "▁Chronicle", -13.001176834106445 ], [ "222", -13.001286506652832 ], [ "intuit", -13.001396179199219 ], [ "inscrire", -13.001429557800293 ], [ "▁speeches", -13.001431465148926 ], [ "▁Eingang", -13.001775741577148 ], [ "▁Adidas", -13.001875877380371 ], [ "▁cemetery", -13.001877784729004 ], [ "▁juicy", -13.001885414123535 ], [ "▁wertvolle", -13.0018892288208 ], [ "▁militari", -13.001917839050293 ], [ "China", -13.00196361541748 ], [ "ecția", -13.002041816711426 ], [ "luster", -13.002063751220703 ], [ "auftrag", -13.00234317779541 ], [ "▁Marius", -13.002523422241211 ], [ "▁crossover", -13.002555847167969 ], [ "▁enthusiast", -13.002555847167969 ], [ "▁cantitate", -13.002630233764648 ], [ "▁animat", -13.002634048461914 ], [ "Park", -13.002793312072754 ], [ "▁unchanged", -13.00279426574707 ], [ "russia", -13.00281810760498 ], [ "instant", 
-13.002833366394043 ], [ "ţiunea", -13.002835273742676 ], [ "▁franchi", -13.002920150756836 ], [ "▁mobiliz", -13.002963066101074 ], [ "athlet", -13.003013610839844 ], [ "▁Cardio", -13.0031099319458 ], [ "▁supus", -13.003119468688965 ], [ "▁Griff", -13.003137588500977 ], [ "flakes", -13.003217697143555 ], [ "soluble", -13.003250122070312 ], [ "Known", -13.003693580627441 ], [ "leaking", -13.003741264343262 ], [ "▁Holocaust", -13.004148483276367 ], [ "gift", -13.004197120666504 ], [ "▁tradiţi", -13.004359245300293 ], [ "▁southeast", -13.004498481750488 ], [ "▁correspondant", -13.00460147857666 ], [ "Isaiah", -13.004603385925293 ], [ "▁diagonal", -13.004606246948242 ], [ "▁Probabil", -13.004680633544922 ], [ "▁dégust", -13.004791259765625 ], [ "▁Naval", -13.004802703857422 ], [ "▁cultivation", -13.004839897155762 ], [ "▁Vertrieb", -13.004849433898926 ], [ "▁pony", -13.004854202270508 ], [ "▁Throw", -13.0050048828125 ], [ "little", -13.005010604858398 ], [ "▁remarque", -13.005074501037598 ], [ "▁parcare", -13.005085945129395 ], [ "3.8", -13.00518798828125 ], [ "▁renunt", -13.005330085754395 ], [ "▁Rewards", -13.005487442016602 ], [ "▁Thur", -13.005496978759766 ], [ "▁underestimate", -13.005515098571777 ], [ "▁frankly", -13.005516052246094 ], [ "Bretagne", -13.005517959594727 ], [ "axial", -13.005537986755371 ], [ "▁identities", -13.0055570602417 ], [ "▁Harvest", -13.00561237335205 ], [ "▁skippe", -13.00561237335205 ], [ "▁Boutique", -13.005670547485352 ], [ "▁intuition", -13.005746841430664 ], [ "▁Rotary", -13.00581169128418 ], [ "▁SERVICE", -13.005875587463379 ], [ "▁refill", -13.005915641784668 ], [ "▁arcade", -13.006060600280762 ], [ "▁komme", -13.006386756896973 ], [ "▁irrelevant", -13.006427764892578 ], [ "▁Sortiment", -13.006429672241211 ], [ "▁scriitor", -13.006488800048828 ], [ "▁clicked", -13.006516456604004 ], [ "▁ciel", -13.006610870361328 ], [ "▁Caesar", -13.00680160522461 ], [ "hound", -13.006803512573242 ], [ "whipped", -13.006843566894531 ], [ "licate", -13.006867408752441 ], [ "▁formatting", -13.006986618041992 ], [ "▁mosaic", -13.007028579711914 ], [ "(2017)", -13.007122039794922 ], [ "777", -13.007257461547852 ], [ "▁Messenger", -13.007342338562012 ], [ "dulci", -13.007369041442871 ], [ "▁(2016)", -13.007420539855957 ], [ "▁popcorn", -13.007425308227539 ], [ "▁Presidential", -13.007497787475586 ], [ "▁brokerage", -13.007564544677734 ], [ "dachte", -13.00762939453125 ], [ "verkauf", -13.00768756866455 ], [ "▁pomme", -13.007721900939941 ], [ "▁fret", -13.007822036743164 ], [ "▁revere", -13.007894515991211 ], [ "▁Canvas", -13.008092880249023 ], [ "▁Nottingham", -13.008255004882812 ], [ "▁Refuge", -13.008257865905762 ], [ "▁injustice", -13.008259773254395 ], [ "▁External", -13.008264541625977 ], [ "dincolo", -13.008304595947266 ], [ "directing", -13.008511543273926 ], [ "▁Toulouse", -13.008710861206055 ], [ "▁cheltuieli", -13.008746147155762 ], [ "▁distrus", -13.008816719055176 ], [ "impôt", -13.008912086486816 ], [ "landschaft", -13.008964538574219 ], [ "passion", -13.00897216796875 ], [ "▁Hobby", -13.009099006652832 ], [ "significant", -13.009115219116211 ], [ "▁Guinea", -13.009209632873535 ], [ "pecializing", -13.009237289428711 ], [ "pozitie", -13.009245872497559 ], [ "bourne", -13.009295463562012 ], [ "▁mâini", -13.00933837890625 ], [ "▁CFR", -13.009395599365234 ], [ "▁Konflikt", -13.009626388549805 ], [ "▁Vodafone", -13.009626388549805 ], [ "OUG", -13.009681701660156 ], [ "▁Übersicht", -13.009735107421875 ], [ "negotiated", -13.009903907775879 ], [ "▁gliss", 
-13.010042190551758 ], [ "▁Kapital", -13.010111808776855 ], [ "QC", -13.0101318359375 ], [ "▁gentleman", -13.01024341583252 ], [ "Inde", -13.010514259338379 ], [ "▁immensely", -13.010639190673828 ], [ "Business", -13.010702133178711 ], [ "▁04/2", -13.010882377624512 ], [ "societatea", -13.010973930358887 ], [ "fluoxetine", -13.011000633239746 ], [ "▁Wachstum", -13.011000633239746 ], [ "▁récit", -13.011011123657227 ], [ "▁Preisvergleich", -13.011034965515137 ], [ "▁Mohammed", -13.011460304260254 ], [ "gefangen", -13.011462211608887 ], [ "▁calibration", -13.011608123779297 ], [ "bekam", -13.011728286743164 ], [ "▁FUN", -13.011758804321289 ], [ "wasting", -13.011839866638184 ], [ "▁prosper", -13.011862754821777 ], [ "▁Afghan", -13.011919021606445 ], [ "▁Heroes", -13.011921882629395 ], [ "▁VMware", -13.011927604675293 ], [ "exception", -13.011969566345215 ], [ "▁înlocui", -13.01244831085205 ], [ "Neu", -13.01246452331543 ], [ "initiation", -13.01250171661377 ], [ "▁Peel", -13.01281452178955 ], [ "▁cunoaste", -13.012836456298828 ], [ "▁menschliche", -13.012849807739258 ], [ "▁poarta", -13.012852668762207 ], [ "▁congestion", -13.012930870056152 ], [ "▁îmbunătăț", -13.013103485107422 ], [ "EUR", -13.013171195983887 ], [ "▁sushi", -13.01326847076416 ], [ "Jährige", -13.01329517364502 ], [ "espoir", -13.013423919677734 ], [ "inspected", -13.013444900512695 ], [ "▁etape", -13.013677597045898 ], [ "▁pharmacist", -13.013754844665527 ], [ "flect", -13.013840675354004 ], [ "Changing", -13.013932228088379 ], [ "▁radiant", -13.014046669006348 ], [ "Daddy", -13.014275550842285 ], [ "▁categorii", -13.014360427856445 ], [ "quête", -13.014628410339355 ], [ "▁skincare", -13.014657020568848 ], [ "hébergement", -13.014674186706543 ], [ "840", -13.01477336883545 ], [ "awaiting", -13.014822006225586 ], [ "▁murdered", -13.014841079711914 ], [ "▁proficient", -13.014863967895508 ], [ "▁chauffe", -13.014899253845215 ], [ "▁contur", -13.014937400817871 ], [ "▁rejoindre", -13.015145301818848 ], [ "▁foloseste", -13.01521110534668 ], [ "▁Grup", -13.01535701751709 ], [ "152", -13.01541519165039 ], [ "▁workspace", -13.015438079833984 ], [ "▁primitive", -13.015546798706055 ], [ "▁Ginger", -13.015557289123535 ], [ "▁chemotherapy", -13.015595436096191 ], [ "▁platinum", -13.015596389770508 ], [ "▁sarcina", -13.01559829711914 ], [ "▁revival", -13.015820503234863 ], [ "▁Meditation", -13.016111373901367 ], [ "▁Vogel", -13.0161714553833 ], [ "IMA", -13.016359329223633 ], [ "▁handset", -13.016486167907715 ], [ "▁Nachmittag", -13.01651668548584 ], [ "▁déchets", -13.016517639160156 ], [ "▁Cornwall", -13.0165433883667 ], [ "▁Curry", -13.016605377197266 ], [ "▁cuplu", -13.016607284545898 ], [ "▁Birth", -13.016822814941406 ], [ "forward", -13.016936302185059 ], [ "Dezvoltare", -13.016977310180664 ], [ "▁irgendwie", -13.016980171203613 ], [ "▁erzielt", -13.016993522644043 ], [ "LOS", -13.01700496673584 ], [ "▁overload", -13.01708984375 ], [ "▁repay", -13.01713752746582 ], [ "urlaub", -13.017155647277832 ], [ "7.0", -13.01716423034668 ], [ "▁Wheat", -13.01748275756836 ], [ "▁degrab", -13.017488479614258 ], [ "▁Brock", -13.017491340637207 ], [ "▁inhabit", -13.0176362991333 ], [ "▁Speech", -13.017834663391113 ], [ "directional", -13.017862319946289 ], [ "▁Mandel", -13.017909049987793 ], [ "▁erscheinen", -13.01791763305664 ], [ "consciously", -13.018059730529785 ], [ "▁sunet", -13.0182523727417 ], [ "▁stole", -13.018259048461914 ], [ "▁Utilis", -13.018349647521973 ], [ "▁obstruction", -13.01852798461914 ], [ "▁mindfulness", -13.0186767578125 
], [ "partnering", -13.01868724822998 ], [ "CSI", -13.018819808959961 ], [ "204", -13.01905632019043 ], [ "▁squirrel", -13.019286155700684 ], [ "▁Rwanda", -13.01975154876709 ], [ "▁hunters", -13.019850730895996 ], [ "▁revitaliz", -13.02022647857666 ], [ "▁avansat", -13.020232200622559 ], [ "▁Yamaha", -13.020294189453125 ], [ "foto", -13.020435333251953 ], [ "▁Vegan", -13.020469665527344 ], [ "▁pitched", -13.02053165435791 ], [ "▁Vortrag", -13.020540237426758 ], [ "traditional", -13.020809173583984 ], [ "offrent", -13.021024703979492 ], [ "▁Expression", -13.021315574645996 ], [ "▁apprécié", -13.021354675292969 ], [ "▁Christina", -13.021408081054688 ], [ "eilig", -13.021464347839355 ], [ "▁verhindern", -13.021599769592285 ], [ "culturii", -13.021607398986816 ], [ "Aşa", -13.021703720092773 ], [ "▁enamel", -13.021756172180176 ], [ "▁fördern", -13.021771430969238 ], [ "▁acheté", -13.021798133850098 ], [ "▁eventuell", -13.021842956542969 ], [ "▁Sino", -13.021873474121094 ], [ "▁totodat", -13.022008895874023 ], [ "accelerated", -13.022202491760254 ], [ "▁strengthened", -13.02245044708252 ], [ "corro", -13.022482872009277 ], [ "4,5", -13.02253246307373 ], [ "▁Beverly", -13.022533416748047 ], [ "ulevard", -13.022615432739258 ], [ "▁hamper", -13.022644996643066 ], [ "▁Tempe", -13.02268123626709 ], [ "▁Yacht", -13.022799491882324 ], [ "▁LGBT", -13.022871017456055 ], [ "▁fingertips", -13.022991180419922 ], [ "▁Auftraggeber", -13.02299976348877 ], [ "▁harbour", -13.0230131149292 ], [ "blew", -13.0230712890625 ], [ "▁ideology", -13.023115158081055 ], [ "▁covenant", -13.023170471191406 ], [ "▁faction", -13.023419380187988 ], [ "▁animé", -13.023481369018555 ], [ "energie", -13.023515701293945 ], [ "iterführende", -13.02369499206543 ], [ "▁MAI", -13.023784637451172 ], [ "▁pluie", -13.023905754089355 ], [ "▁cathedral", -13.023919105529785 ], [ "▁chiropractic", -13.023919105529785 ], [ "monies", -13.023968696594238 ], [ "▁contraction", -13.024054527282715 ], [ "pvc", -13.024202346801758 ], [ "staff", -13.024209022521973 ], [ "BIT", -13.024216651916504 ], [ "EET", -13.024514198303223 ], [ "▁sanction", -13.024575233459473 ], [ "▁Reiki", -13.024709701538086 ], [ "Trying", -13.024772644042969 ], [ "▁endangered", -13.024847984313965 ], [ "▁Emperor", -13.024849891662598 ], [ "▁empfi", -13.024909973144531 ], [ "animation", -13.024998664855957 ], [ "207", -13.025029182434082 ], [ "separating", -13.02512264251709 ], [ "▁lucrative", -13.025148391723633 ], [ "▁ortho", -13.02524185180664 ], [ "variété", -13.025266647338867 ], [ "hésit", -13.025287628173828 ], [ "nuances", -13.025289535522461 ], [ "▁$250", -13.025394439697266 ], [ "▁drumuri", -13.025435447692871 ], [ "▁unsafe", -13.025446891784668 ], [ "▁1943", -13.025477409362793 ], [ "▁automatique", -13.025524139404297 ], [ "billed", -13.025585174560547 ], [ "▁rectangle", -13.02578067779541 ], [ "▁Spannung", -13.025781631469727 ], [ "▁dévoil", -13.025790214538574 ], [ "▁perimeter", -13.02580738067627 ], [ "▁imaginative", -13.02581787109375 ], [ "actifs", -13.025851249694824 ], [ "neuve", -13.0259428024292 ], [ "leagă", -13.026269912719727 ], [ "gehende", -13.026700973510742 ], [ "▁Gorgeous", -13.026708602905273 ], [ "▁impeccable", -13.026708602905273 ], [ "▁Curtain", -13.026718139648438 ], [ "▁presume", -13.026731491088867 ], [ "surpassed", -13.02687931060791 ], [ "schiff", -13.026927947998047 ], [ "Allied", -13.02699089050293 ], [ "fanden", -13.027080535888672 ], [ "▁célébr", -13.027174949645996 ], [ "▁phénomène", -13.027174949645996 ], [ "▁Powell", 
-13.027413368225098 ], [ "jean", -13.027631759643555 ], [ "▁peculiar", -13.027640342712402 ], [ "▁Antarctic", -13.027641296386719 ], [ "▁gradient", -13.027663230895996 ], [ "▁brainstorm", -13.027704238891602 ], [ "échapp", -13.027726173400879 ], [ "Bot", -13.027738571166992 ], [ "cita", -13.027743339538574 ], [ "▁lumber", -13.027752876281738 ], [ "weichen", -13.027852058410645 ], [ "▁Halte", -13.028024673461914 ], [ "▁noștri", -13.028107643127441 ], [ "construction", -13.028165817260742 ], [ "DOC", -13.028236389160156 ], [ "▁aluat", -13.028319358825684 ], [ "streamlined", -13.028462409973145 ], [ "Bio", -13.028494834899902 ], [ "▁nutritious", -13.028573036193848 ], [ "▁délicat", -13.0286283493042 ], [ "▁sticla", -13.028656959533691 ], [ "OVE", -13.028721809387207 ], [ "▁panneau", -13.028793334960938 ], [ "▁hetero", -13.028801918029785 ], [ "▁annul", -13.028839111328125 ], [ "IDA", -13.028935432434082 ], [ "▁pitches", -13.028960227966309 ], [ "▁Edmonton", -13.029040336608887 ], [ "mediated", -13.029136657714844 ], [ "AFP", -13.029139518737793 ], [ "▁Tibetan", -13.029228210449219 ], [ "intégration", -13.02934455871582 ], [ "▁Rox", -13.0294771194458 ], [ "energia", -13.02950668334961 ], [ "▁reconnaît", -13.029509544372559 ], [ "▁ține", -13.029525756835938 ], [ "▁ignition", -13.029534339904785 ], [ "Foarte", -13.029541015625 ], [ "▁HOME", -13.029545783996582 ], [ "▁MLB", -13.029545783996582 ], [ "▁Wähle", -13.029590606689453 ], [ "▁Merkel", -13.029658317565918 ], [ "poarte", -13.029664993286133 ], [ "ALT", -13.02979850769043 ], [ "jenigen", -13.029985427856445 ], [ "▁conflit", -13.029987335205078 ], [ "▁buckle", -13.029996871948242 ], [ "▁cacao", -13.030035018920898 ], [ "▁représentation", -13.030076026916504 ], [ "incepand", -13.030267715454102 ], [ "▁Carroll", -13.030306816101074 ], [ "▁clientilor", -13.030370712280273 ], [ "▁immunity", -13.030441284179688 ], [ "oût", -13.03044319152832 ], [ "▁Witch", -13.030488014221191 ], [ "▁Wolfgang", -13.030532836914062 ], [ "▁prudent", -13.030701637268066 ], [ "fotograf", -13.03084945678711 ], [ "paar", -13.030871391296387 ], [ "ergeti", -13.030927658081055 ], [ "▁empowerment", -13.031112670898438 ], [ "▁Admir", -13.03122329711914 ], [ "▁complémentaire", -13.031340599060059 ], [ "▁angepasst", -13.031376838684082 ], [ "▁flirt", -13.031376838684082 ], [ "▁elektronische", -13.031388282775879 ], [ "▁stereotype", -13.03140640258789 ], [ "SIL", -13.031465530395508 ], [ "▁Realtor", -13.031471252441406 ], [ "Edit", -13.031774520874023 ], [ "requête", -13.03181266784668 ], [ "▁Herstellung", -13.031815528869629 ], [ "▁cyst", -13.031947135925293 ], [ "syndic", -13.031994819641113 ], [ "leni", -13.032007217407227 ], [ "▁fringe", -13.032020568847656 ], [ "▁Jardin", -13.032032012939453 ], [ "▁Vezi", -13.032052993774414 ], [ "▁Ausstattung", -13.032312393188477 ], [ "▁glide", -13.032590866088867 ], [ "▁Andere", -13.032758712768555 ], [ "▁Haftung", -13.032781600952148 ], [ "maßnahmen", -13.032788276672363 ], [ "▁recommandé", -13.032790184020996 ], [ "▁nave", -13.032793998718262 ], [ "viziune", -13.033051490783691 ], [ "▁stimulus", -13.033098220825195 ], [ "faulty", -13.0331449508667 ], [ "▁vicinity", -13.033249855041504 ], [ "▁turnaround", -13.033445358276367 ], [ "stammt", -13.033846855163574 ], [ "▁problemlos", -13.033856391906738 ], [ "▁Establish", -13.03415298461914 ], [ "▁Silva", -13.034172058105469 ], [ "▁muzică", -13.034187316894531 ], [ "▁theatrical", -13.03421401977539 ], [ "▁braid", -13.034242630004883 ], [ "▁blieb", -13.034276962280273 ], [ "158", 
-13.034296989440918 ], [ "▁ignorance", -13.034330368041992 ], [ "onset", -13.034416198730469 ], [ "zeitlich", -13.034523963928223 ], [ "▁Sink", -13.034523963928223 ], [ "▁caractéris", -13.034594535827637 ], [ "▁kreative", -13.03465747833252 ], [ "behörde", -13.034677505493164 ], [ "repairing", -13.034680366516113 ], [ "▁tumble", -13.034757614135742 ], [ "zione", -13.034871101379395 ], [ "▁Evil", -13.03494644165039 ], [ "▁popping", -13.034952163696289 ], [ "▁mutant", -13.035025596618652 ], [ "emme", -13.035030364990234 ], [ "▁Pleasant", -13.035125732421875 ], [ "▁appetizer", -13.035125732421875 ], [ "▁PLEASE", -13.035126686096191 ], [ "▁physiological", -13.035128593444824 ], [ "▁Facility", -13.035131454467773 ], [ "▁quirky", -13.035131454467773 ], [ "▁colectiv", -13.035154342651367 ], [ "151", -13.035181999206543 ], [ "August", -13.03531551361084 ], [ "▁Jewelry", -13.035327911376953 ], [ "▁ziar", -13.035481452941895 ], [ "▁puissant", -13.035489082336426 ], [ "▁Argument", -13.035595893859863 ], [ "▁Betracht", -13.035621643066406 ], [ "▁TRANS", -13.035636901855469 ], [ "Exception", -13.036011695861816 ], [ "nosti", -13.036083221435547 ], [ "▁Geographic", -13.036155700683594 ], [ "amazingly", -13.036173820495605 ], [ "▁météo", -13.036181449890137 ], [ "streit", -13.036314010620117 ], [ "▁idle", -13.036439895629883 ], [ "179", -13.036441802978516 ], [ "▁Bremen", -13.036534309387207 ], [ "▁Kläger", -13.03653621673584 ], [ "▁Grammy", -13.036598205566406 ], [ "▁Philosophy", -13.036613464355469 ], [ "▁utilizeaz", -13.036779403686523 ], [ "Accord", -13.036897659301758 ], [ "▁USDA", -13.036986351013184 ], [ "Continuing", -13.037010192871094 ], [ "geschenk", -13.037178039550781 ], [ "kredit", -13.037248611450195 ], [ "Laugh", -13.037297248840332 ], [ "oaring", -13.037406921386719 ], [ "▁Richter", -13.037460327148438 ], [ "▁Figur", -13.037938117980957 ], [ "▁inconsistent", -13.037947654724121 ], [ "cresterea", -13.038069725036621 ], [ "▁regeneration", -13.038130760192871 ], [ "speaking", -13.03818416595459 ], [ "▁nasal", -13.03824234008789 ], [ "▁partagé", -13.038259506225586 ], [ "▁Warranty", -13.038419723510742 ], [ "▁Mueller", -13.038501739501953 ], [ "formează", -13.038734436035156 ], [ "hundert", -13.038745880126953 ], [ "gemeldet", -13.038893699645996 ], [ "▁excursions", -13.038912773132324 ], [ "▁linii", -13.039066314697266 ], [ "gefährlich", -13.039067268371582 ], [ "▁schema", -13.03907299041748 ], [ "nişte", -13.039131164550781 ], [ "▁roadway", -13.039132118225098 ], [ "▁regression", -13.039135932922363 ], [ "▁mână", -13.039366722106934 ], [ "5.3", -13.039373397827148 ], [ "▁Spät", -13.039734840393066 ], [ "▁stubborn", -13.039833068847656 ], [ "efectele", -13.040030479431152 ], [ "▁atenţi", -13.040136337280273 ], [ "▁dovedit", -13.04018497467041 ], [ "▁Agile", -13.040190696716309 ], [ "denying", -13.04023265838623 ], [ "fluss", -13.040620803833008 ], [ "▁Calvin", -13.04066276550293 ], [ "Sculpt", -13.04083251953125 ], [ "égalité", -13.040884971618652 ], [ "ticket", -13.040977478027344 ], [ "marketed", -13.041044235229492 ], [ "holic", -13.041173934936523 ], [ "▁eCommerce", -13.041346549987793 ], [ "▁Slip", -13.041369438171387 ], [ "▁degradation", -13.041736602783203 ], [ "écart", -13.041742324829102 ], [ "AGR", -13.041807174682617 ], [ "▁burglar", -13.041837692260742 ], [ "▁conjug", -13.041903495788574 ], [ "LLP", -13.04194164276123 ], [ "couvrir", -13.041997909545898 ], [ "▁Hearing", -13.042001724243164 ], [ "▁canton", -13.042006492614746 ], [ "▁sixteen", -13.042068481445312 ], [ "▁Verlust", 
-13.042097091674805 ], [ "allied", -13.042268753051758 ], [ "Performing", -13.042393684387207 ], [ "▁évoqu", -13.042519569396973 ], [ "▁bookstore", -13.042574882507324 ], [ "▁intrebari", -13.042627334594727 ], [ "▁Hyderabad", -13.042668342590332 ], [ "▁repertoire", -13.042668342590332 ], [ "▁cablu", -13.042678833007812 ], [ "▁Costume", -13.04269790649414 ], [ "▁Shannon", -13.042713165283203 ], [ "▁glossy", -13.042800903320312 ], [ "▁cible", -13.042876243591309 ], [ "Saint", -13.042984008789062 ], [ "▁Ultima", -13.043042182922363 ], [ "▁teint", -13.0432767868042 ], [ "▁envision", -13.043477058410645 ], [ "▁thinner", -13.043478965759277 ], [ "ис", -13.043609619140625 ], [ "▁bladder", -13.043615341186523 ], [ "▁Prairie", -13.043618202209473 ], [ "▁puppies", -13.043633460998535 ], [ "▁overweight", -13.043729782104492 ], [ "destined", -13.043925285339355 ], [ "▁addictive", -13.043935775756836 ], [ "▁posé", -13.043993949890137 ], [ "▁mecanism", -13.044112205505371 ], [ "▁chorus", -13.044466972351074 ], [ "weder", -13.044528007507324 ], [ "▁begrüß", -13.044562339782715 ], [ "▁unsuccessful", -13.044562339782715 ], [ "executing", -13.044564247131348 ], [ "▁metadata", -13.044611930847168 ], [ "traiter", -13.044620513916016 ], [ "▁borrowed", -13.044649124145508 ], [ "▁aeroport", -13.044679641723633 ], [ "▁Bibli", -13.044761657714844 ], [ "▁youthful", -13.044902801513672 ], [ "▁Herbert", -13.044913291931152 ], [ "client", -13.04500961303711 ], [ "merci", -13.04520034790039 ], [ "▁Beast", -13.045210838317871 ], [ "▁Entrepreneur", -13.045230865478516 ], [ "▁Gelände", -13.045256614685059 ], [ "▁Packers", -13.045268058776855 ], [ "formarea", -13.045469284057617 ], [ "▁Kündigung", -13.045511245727539 ], [ "▁verdient", -13.045515060424805 ], [ "▁solutie", -13.045530319213867 ], [ "figuration", -13.045611381530762 ], [ "voluntarily", -13.045622825622559 ], [ "Gregor", -13.045742988586426 ], [ "▁Uncle", -13.04589557647705 ], [ "tarifs", -13.045907020568848 ], [ "▁écologique", -13.045987129211426 ], [ "▁Investition", -13.045991897583008 ], [ "exemplar", -13.046127319335938 ], [ "▁prevede", -13.046144485473633 ], [ "▁waive", -13.046147346496582 ], [ "▁Legion", -13.046156883239746 ], [ "similar", -13.046247482299805 ], [ "▁shareholder", -13.04626750946045 ], [ "▁oyster", -13.046476364135742 ], [ "▁Lightning", -13.046530723571777 ], [ "experimenting", -13.04662799835205 ], [ "▁replies", -13.04663372039795 ], [ "80,000", -13.046757698059082 ], [ "▁adept", -13.04692554473877 ], [ "▁Crăciun", -13.046935081481934 ], [ "▁sanatos", -13.046935081481934 ], [ "305", -13.04699993133545 ], [ "specialised", -13.047069549560547 ], [ "▁drummer", -13.047189712524414 ], [ "Applicants", -13.04741096496582 ], [ "objekt", -13.04741096496582 ], [ "▁Fifth", -13.047446250915527 ], [ "rgic", -13.047567367553711 ], [ "theater", -13.047635078430176 ], [ "▁terminé", -13.047852516174316 ], [ "▁Englisch", -13.047894477844238 ], [ "▁Oradea", -13.047898292541504 ], [ "possesses", -13.0479097366333 ], [ "illiers", -13.047986030578613 ], [ "▁refurbish", -13.048110961914062 ], [ "graphie", -13.04814338684082 ], [ "▁Booth", -13.048174858093262 ], [ "▁Ausdruck", -13.048192977905273 ], [ "▁Marriage", -13.048361778259277 ], [ "▁knives", -13.048362731933594 ], [ "▁Relief", -13.048368453979492 ], [ "▁Clerk", -13.048392295837402 ], [ "wait", -13.048501014709473 ], [ "▁probablement", -13.048698425292969 ], [ "▁suplimentar", -13.048701286315918 ], [ "dollar", -13.048797607421875 ], [ "English", -13.04898452758789 ], [ "866", -13.049300193786621 ], [ 
"▁Savannah", -13.049314498901367 ], [ "▁aftermath", -13.049318313598633 ], [ "phé", -13.04932689666748 ], [ "▁Plum", -13.049417495727539 ], [ "264", -13.049566268920898 ], [ "2.000", -13.049582481384277 ], [ "niei", -13.049603462219238 ], [ "ATP", -13.049803733825684 ], [ "mila", -13.04985523223877 ], [ "▁glut", -13.049887657165527 ], [ "gotta", -13.049891471862793 ], [ "schütt", -13.049893379211426 ], [ "klick", -13.049996376037598 ], [ "whether", -13.050090789794922 ], [ "▁Wade", -13.050163269042969 ], [ "▁Riley", -13.050280570983887 ], [ "Chancellor", -13.050288200378418 ], [ "▁nebun", -13.050300598144531 ], [ "▁aufgebaut", -13.050374984741211 ], [ "steigt", -13.050423622131348 ], [ "▁entirety", -13.050494194030762 ], [ "▁telefoane", -13.05074691772461 ], [ "▁Roulette", -13.050763130187988 ], [ "1700", -13.050787925720215 ], [ "▁lycée", -13.050856590270996 ], [ "rotary", -13.051128387451172 ], [ "benefited", -13.051170349121094 ], [ "▁Bisericii", -13.051220893859863 ], [ "▁Rehabilitation", -13.051220893859863 ], [ "▁lithium", -13.051228523254395 ], [ "imposing", -13.051279067993164 ], [ "176", -13.051329612731934 ], [ "▁thunder", -13.051527976989746 ], [ "ăsesc", -13.052000045776367 ], [ "▁Einblick", -13.052010536193848 ], [ "oiled", -13.052151679992676 ], [ "SSA", -13.052181243896484 ], [ "apparition", -13.05224609375 ], [ "▁Impress", -13.052273750305176 ], [ "▁Aboriginal", -13.052297592163086 ], [ "loos", -13.052383422851562 ], [ "▁Bread", -13.052440643310547 ], [ "177", -13.052619934082031 ], [ "VERS", -13.052638053894043 ], [ "▁Respect", -13.05271053314209 ], [ "▁Practical", -13.053047180175781 ], [ "drafting", -13.05306339263916 ], [ "си", -13.053099632263184 ], [ "▁faza", -13.053109169006348 ], [ "▁sovereign", -13.053123474121094 ], [ "▁Untersuchung", -13.05314826965332 ], [ "▁Niveau", -13.053154945373535 ], [ "transport", -13.053182601928711 ], [ "▁downstream", -13.053293228149414 ], [ "▁Milton", -13.053383827209473 ], [ "▁knob", -13.053390502929688 ], [ "employeur", -13.053499221801758 ], [ "▁furnish", -13.053544044494629 ], [ "weather", -13.053564071655273 ], [ "LAB", -13.053646087646484 ], [ "166", -13.053853988647461 ], [ "▁salaire", -13.053937911987305 ], [ "▁Carnival", -13.054088592529297 ], [ "4-0", -13.054168701171875 ], [ "▁Angle", -13.054291725158691 ], [ "▁José", -13.054399490356445 ], [ "architecture", -13.054475784301758 ], [ "▁Sunset", -13.054574966430664 ], [ "▁Absolut", -13.054694175720215 ], [ "▁herrlich", -13.05470085144043 ], [ "12%", -13.054703712463379 ], [ "▁Indo", -13.054823875427246 ], [ "▁Komfort", -13.055049896240234 ], [ "▁acțiuni", -13.05505084991455 ], [ "energize", -13.055085182189941 ], [ "▁Warning", -13.055171966552734 ], [ "▁Sunny", -13.055216789245605 ], [ "▁razor", -13.055489540100098 ], [ "▁psychic", -13.055490493774414 ], [ "▁convivial", -13.055525779724121 ], [ "Voraussetzungen", -13.05555534362793 ], [ "IMO", -13.055622100830078 ], [ "opérateur", -13.055743217468262 ], [ "▁langjährige", -13.05575942993164 ], [ "▁Spanie", -13.055901527404785 ], [ "pulmonary", -13.056004524230957 ], [ "▁Bingo", -13.056050300598145 ], [ "▁confession", -13.056096076965332 ], [ "▁Petru", -13.056100845336914 ], [ "▁prerequisite", -13.056164741516113 ], [ "▁dodge", -13.056352615356445 ], [ "▁McN", -13.056436538696289 ], [ "▁originate", -13.056577682495117 ], [ "▁nettoy", -13.056612014770508 ], [ "▁$14", -13.056645393371582 ], [ "▁Bride", -13.05669116973877 ], [ "▁noisy", -13.05673885345459 ], [ "▁Worcester", -13.056963920593262 ], [ "▁Surrey", -13.056982040405273 
], [ "harmonis", -13.057110786437988 ], [ "▁représentant", -13.057304382324219 ], [ "organisée", -13.057475090026855 ], [ "truction", -13.057513236999512 ], [ "injected", -13.057597160339355 ], [ "▁Suzuki", -13.057924270629883 ], [ "▁japonais", -13.057924270629883 ], [ "▁turquoise", -13.057924270629883 ], [ "▁Peut", -13.058004379272461 ], [ "▁Sequ", -13.058028221130371 ], [ "slated", -13.058037757873535 ], [ "▁Alma", -13.058215141296387 ], [ "▁gebraucht", -13.05827522277832 ], [ "gängig", -13.058281898498535 ], [ "▁commis", -13.058377265930176 ], [ "ACS", -13.05856990814209 ], [ "pressure", -13.058664321899414 ], [ "cured", -13.05874252319336 ], [ "▁Jackie", -13.058757781982422 ], [ "▁Kashmir", -13.05888557434082 ], [ "▁recruited", -13.059000968933105 ], [ "▁vécu", -13.059011459350586 ], [ "▁opus", -13.059052467346191 ], [ "kWh", -13.05927562713623 ], [ "▁tapping", -13.059292793273926 ], [ "▁tehnologie", -13.05931282043457 ], [ "▁Gentle", -13.059365272521973 ], [ "▁bombard", -13.059372901916504 ], [ "▁caméra", -13.059427261352539 ], [ "züglich", -13.059431076049805 ], [ "▁bingo", -13.059453010559082 ], [ "private", -13.059496879577637 ], [ "▁mediator", -13.059642791748047 ], [ "▁carbohydrates", -13.059847831726074 ], [ "▁workmanship", -13.059849739074707 ], [ "▁Combat", -13.059853553771973 ], [ "▁Mickey", -13.059901237487793 ], [ "▁distressed", -13.059908866882324 ], [ "lucrează", -13.059924125671387 ], [ "treatment", -13.06007194519043 ], [ "▁Einwohner", -13.060330390930176 ], [ "▁glaze", -13.060386657714844 ], [ "scholarly", -13.06043529510498 ], [ "ROC", -13.060750007629395 ], [ "▁Darwin", -13.060774803161621 ], [ "drückt", -13.060775756835938 ], [ "▁treadmill", -13.060819625854492 ], [ "ntz", -13.060830116271973 ], [ "620", -13.061087608337402 ], [ "surface", -13.061148643493652 ], [ "▁vieţii", -13.0612211227417 ], [ "990", -13.061296463012695 ], [ "▁doigt", -13.061341285705566 ], [ "▁explor", -13.061450004577637 ], [ "▁asistent", -13.061670303344727 ], [ "coloriage", -13.061734199523926 ], [ "▁Martinez", -13.061758041381836 ], [ "▁antibodies", -13.061775207519531 ], [ "Schülerinnen", -13.061779975891113 ], [ "Honestly", -13.06178092956543 ], [ "grabbing", -13.061871528625488 ], [ "▁Cardiff", -13.061897277832031 ], [ "▁Trophy", -13.062084197998047 ], [ "▁pupil", -13.062117576599121 ], [ "▁invoke", -13.062161445617676 ], [ "bezüglich", -13.062193870544434 ], [ "Anschließend", -13.062275886535645 ], [ "perks", -13.062360763549805 ], [ "530", -13.062373161315918 ], [ "▁emblem", -13.062431335449219 ], [ "770", -13.062543869018555 ], [ "clairement", -13.062590599060059 ], [ "▁sublinia", -13.062597274780273 ], [ "▁1910", -13.062719345092773 ], [ "▁Embassy", -13.062740325927734 ], [ "▁Valencia", -13.062740325927734 ], [ "▁catastrophic", -13.062740325927734 ], [ "▁simulator", -13.06274700164795 ], [ "Pierre", -13.062766075134277 ], [ "▁doorstep", -13.062806129455566 ], [ "▁rallie", -13.062881469726562 ], [ "▁șans", -13.062891960144043 ], [ "▁crosses", -13.06300163269043 ], [ "▁zodi", -13.06312084197998 ], [ "Next", -13.06314754486084 ], [ "▁rebuilt", -13.063152313232422 ], [ "▁panorama", -13.063222885131836 ], [ "196", -13.06324291229248 ], [ "▁erinnert", -13.06370735168457 ], [ "lism", -13.06371784210205 ], [ "opened", -13.06383228302002 ], [ "▁breakout", -13.064126014709473 ], [ "▁mosque", -13.064153671264648 ], [ "boc", -13.064507484436035 ], [ "▁grout", -13.064568519592285 ], [ "▁Gather", -13.064582824707031 ], [ "▁vampire", -13.06467342376709 ], [ "▁tandem", -13.064684867858887 ], [ 
"▁pastra", -13.064702033996582 ], [ "▁lösen", -13.064794540405273 ], [ "▁discontinu", -13.064826965332031 ], [ "fuses", -13.064885139465332 ], [ "▁identitate", -13.064947128295898 ], [ "BAC", -13.064964294433594 ], [ "▁$100,000", -13.065122604370117 ], [ "Finder", -13.06515121459961 ], [ "▁Leicester", -13.065157890319824 ], [ "▁1933", -13.065159797668457 ], [ "informatiile", -13.065234184265137 ], [ "lädt", -13.065309524536133 ], [ "iggle", -13.065399169921875 ], [ "▁Discuss", -13.065462112426758 ], [ "distributing", -13.065470695495605 ], [ "▁disappoint", -13.065475463867188 ], [ "ecţia", -13.065611839294434 ], [ "▁condiment", -13.065640449523926 ], [ "▁Marriott", -13.065642356872559 ], [ "▁entspannt", -13.065644264221191 ], [ "arbitrary", -13.06564998626709 ], [ "rühren", -13.06574821472168 ], [ "Intensiv", -13.065771102905273 ], [ "eliminare", -13.065895080566406 ], [ "muster", -13.06594467163086 ], [ "▁komplexe", -13.066130638122559 ], [ "▁(2008)", -13.066184997558594 ], [ "absolument", -13.066349029541016 ], [ "aloo", -13.066420555114746 ], [ "cererea", -13.06655216217041 ], [ "▁imobiliar", -13.066696166992188 ], [ "▁paramount", -13.066705703735352 ], [ "▁Vince", -13.066723823547363 ], [ "pov", -13.067076683044434 ], [ "▁conveyor", -13.067549705505371 ], [ "▁Natalie", -13.067583084106445 ], [ "▁Comedy", -13.067623138427734 ], [ "Developing", -13.0678129196167 ], [ "disputed", -13.067878723144531 ], [ "164", -13.067911148071289 ], [ "▁Communist", -13.067949295043945 ], [ "▁Bahnhof", -13.06806468963623 ], [ "dokument", -13.068145751953125 ], [ "▁Somali", -13.06828498840332 ], [ "▁Strasbourg", -13.068503379821777 ], [ "▁Technician", -13.068550109863281 ], [ "▁subsidies", -13.068633079528809 ], [ "judeţul", -13.068723678588867 ], [ "▁bible", -13.068769454956055 ], [ "gefahren", -13.068855285644531 ], [ "▁literal", -13.068882942199707 ], [ "▁diminish", -13.068940162658691 ], [ "Sfântul", -13.0689697265625 ], [ "▁doreșt", -13.068978309631348 ], [ "▁Xiaomi", -13.069036483764648 ], [ "▁planète", -13.069130897521973 ], [ "▁LTD", -13.069175720214844 ], [ "▁Zugriff", -13.069196701049805 ], [ "beginn", -13.06921672821045 ], [ "▁Einführung", -13.069294929504395 ], [ "▁coronar", -13.069393157958984 ], [ "lomi", -13.0693941116333 ], [ "▁Accueil", -13.0695219039917 ], [ "scanned", -13.069528579711914 ], [ "▁Banque", -13.06952953338623 ], [ "▁réaction", -13.069531440734863 ], [ "▁Hoffman", -13.069546699523926 ], [ "▁merveille", -13.069637298583984 ], [ "navigating", -13.069719314575195 ], [ "schalten", -13.06984806060791 ], [ "▁ieşi", -13.070136070251465 ], [ "1-6", -13.070175170898438 ], [ "▁frustr", -13.070670127868652 ], [ "▁réfléchi", -13.0709810256958 ], [ "▁difuz", -13.071100234985352 ], [ "▁freue", -13.07121753692627 ], [ "besuch", -13.071349143981934 ], [ "153", -13.071386337280273 ], [ "▁butterflies", -13.071467399597168 ], [ "▁terrifying", -13.071467399597168 ], [ "▁încuraj", -13.071468353271484 ], [ "▁Château", -13.071470260620117 ], [ "▁contingent", -13.071474075317383 ], [ "▁abusive", -13.0714750289917 ], [ "▁SharePoint", -13.07148551940918 ], [ "▁skating", -13.071573257446289 ], [ "▁militaire", -13.07166576385498 ], [ "▁Vig", -13.071690559387207 ], [ "omics", -13.071840286254883 ], [ "▁Blockchain", -13.07197093963623 ], [ "▁principii", -13.071975708007812 ], [ "▁permitting", -13.071979522705078 ], [ "optimisation", -13.072270393371582 ], [ "▁maintien", -13.072328567504883 ], [ "▁Aluminum", -13.072442054748535 ], [ "▁Plymouth", -13.072443008422852 ], [ "▁Weiterbildung", 
-13.072457313537598 ], [ "▁Finanzierung", -13.072505950927734 ], [ "▁Kerala", -13.072514533996582 ], [ "insulated", -13.072668075561523 ], [ "▁loaf", -13.072802543640137 ], [ "▁Sammlung", -13.072929382324219 ], [ "▁îndepărt", -13.072930335998535 ], [ "▁Gewerbe", -13.072942733764648 ], [ "udel", -13.072988510131836 ], [ "▁coursework", -13.073104858398438 ], [ "▁Darstellung", -13.073246002197266 ], [ "▁indeplin", -13.073433876037598 ], [ "▁Gandhi", -13.073434829711914 ], [ "tossed", -13.07361888885498 ], [ "ewed", -13.073844909667969 ], [ "▁classement", -13.073884963989258 ], [ "▁Protestant", -13.073905944824219 ], [ "▁frumoasă", -13.073905944824219 ], [ "▁pantalon", -13.073906898498535 ], [ "▁rivet", -13.073966979980469 ], [ "▁Echt", -13.0741605758667 ], [ "erviciului", -13.07421588897705 ], [ "fabricated", -13.074322700500488 ], [ "Compania", -13.074372291564941 ], [ "▁juvenile", -13.074394226074219 ], [ "▁souligne", -13.07444953918457 ], [ "▁chrono", -13.07447338104248 ], [ "▁VII", -13.074594497680664 ], [ "▁Kirch", -13.074714660644531 ], [ "catcher", -13.075014114379883 ], [ "salv", -13.075263023376465 ], [ "▁Enforcement", -13.075370788574219 ], [ "▁Penguin", -13.075410842895508 ], [ "kowski", -13.075465202331543 ], [ "▁2:1", -13.075470924377441 ], [ "gesundheit", -13.075475692749023 ], [ "▁unveil", -13.075519561767578 ], [ "bending", -13.075531959533691 ], [ "▁conecta", -13.075579643249512 ], [ "▁faim", -13.075885772705078 ], [ "▁MacBook", -13.075969696044922 ], [ "versuch", -13.07600212097168 ], [ "▁regiuni", -13.076029777526855 ], [ "▁Willow", -13.076184272766113 ], [ "▁finanziell", -13.076303482055664 ], [ "▁nurturing", -13.076354026794434 ], [ "impuls", -13.076370239257812 ], [ "▁funktionieren", -13.076371192932129 ], [ "▁rezult", -13.076554298400879 ], [ "▁spui", -13.076593399047852 ], [ "▁walkway", -13.076653480529785 ], [ "▁Rauch", -13.076708793640137 ], [ "169", -13.076793670654297 ], [ "610", -13.076863288879395 ], [ "▁scazut", -13.0773286819458 ], [ "▁Garrett", -13.077329635620117 ], [ "▁necesită", -13.077352523803711 ], [ "Articolul", -13.077364921569824 ], [ "numită", -13.077371597290039 ], [ "Coastal", -13.077383041381836 ], [ "▁canned", -13.077421188354492 ], [ "▁Friendly", -13.077499389648438 ], [ "dissolved", -13.0775728225708 ], [ "seid", -13.077674865722656 ], [ "▁feminin", -13.077685356140137 ], [ "▁fetch", -13.077710151672363 ], [ "▁Accent", -13.077767372131348 ], [ "phrase", -13.077771186828613 ], [ "effekt", -13.077775955200195 ], [ "▁Progressive", -13.077777862548828 ], [ "▁canadien", -13.077820777893066 ], [ "iety", -13.077839851379395 ], [ "eignen", -13.077984809875488 ], [ "paraître", -13.07812213897705 ], [ "▁asylum", -13.07833194732666 ], [ "▁Albany", -13.078362464904785 ], [ "▁remis", -13.078386306762695 ], [ "▁Joyce", -13.078664779663086 ], [ "schätzt", -13.078784942626953 ], [ "▁begleiten", -13.078801155090332 ], [ "▁Siemens", -13.079007148742676 ], [ "▁schlimm", -13.079061508178711 ], [ "▁Libra", -13.079254150390625 ], [ "▁Composite", -13.079290390014648 ], [ "▁écr", -13.079315185546875 ], [ "disciplina", -13.079379081726074 ], [ "▁premature", -13.079630851745605 ], [ "▁scopuri", -13.079681396484375 ], [ "ffnung", -13.079715728759766 ], [ "7000", -13.079726219177246 ], [ "▁conséquent", -13.079780578613281 ], [ "▁côte", -13.079787254333496 ], [ "celul", -13.079872131347656 ], [ "▁fourteen", -13.079940795898438 ], [ "▁Riverside", -13.080077171325684 ], [ "gemacht", -13.08013916015625 ], [ "▁volcanic", -13.080272674560547 ], [ "▁Salesforce", 
-13.080315589904785 ], [ "▁Granite", -13.080317497253418 ], [ "▁Zentral", -13.080329895019531 ], [ "▁Female", -13.080341339111328 ], [ "▁culmin", -13.08047103881836 ], [ "▁urmatoare", -13.080547332763672 ], [ "toxicity", -13.080560684204102 ], [ "▁mâna", -13.080678939819336 ], [ "▁Umfang", -13.080764770507812 ], [ "▁Encore", -13.08077621459961 ], [ "▁Edgar", -13.080831527709961 ], [ "▁négoci", -13.080852508544922 ], [ "njeux", -13.080873489379883 ], [ "▁variance", -13.080917358398438 ], [ "▁Functional", -13.080973625183105 ], [ "172", -13.081046104431152 ], [ "▁dissolve", -13.0811185836792 ], [ "förderung", -13.081188201904297 ], [ "▁Brilliant", -13.081254959106445 ], [ "▁comprehension", -13.081254959106445 ], [ "▁soybean", -13.081254959106445 ], [ "▁standalone", -13.081255912780762 ], [ "▁Communi", -13.081303596496582 ], [ "▁ajut", -13.081313133239746 ], [ "▁lavish", -13.081338882446289 ], [ "Ouest", -13.081384658813477 ], [ "▁Maggie", -13.081385612487793 ], [ "▁evolutionary", -13.081550598144531 ], [ "bowel", -13.081575393676758 ], [ "▁glyco", -13.081626892089844 ], [ "▁Happi", -13.081706047058105 ], [ "organising", -13.081710815429688 ], [ "▁übernimm", -13.081727027893066 ], [ "▁snowboard", -13.081793785095215 ], [ "▁prévention", -13.081830024719238 ], [ "▁Celebrate", -13.082160949707031 ], [ "▁pottery", -13.082254409790039 ], [ "▁Outstanding", -13.082328796386719 ], [ "▁toamna", -13.082331657409668 ], [ "▁graceful", -13.082548141479492 ], [ "197", -13.082559585571289 ], [ "strecke", -13.082598686218262 ], [ "▁medizinische", -13.082733154296875 ], [ "216", -13.082839965820312 ], [ "▁prune", -13.082868576049805 ], [ "Pourtant", -13.083000183105469 ], [ "▁Difference", -13.083224296569824 ], [ "▁factura", -13.083830833435059 ], [ "Mass", -13.084161758422852 ], [ "▁Enhanc", -13.084190368652344 ], [ "upholstered", -13.084209442138672 ], [ "▁übernommen", -13.084209442138672 ], [ "▁mitigation", -13.084210395812988 ], [ "▁Hidden", -13.084219932556152 ], [ "▁Häuser", -13.084234237670898 ], [ "▁Pavel", -13.084403991699219 ], [ "▁congress", -13.084512710571289 ], [ "▁antibody", -13.084598541259766 ], [ "▁stitches", -13.084811210632324 ], [ "▁colonies", -13.084820747375488 ], [ "Into", -13.084900856018066 ], [ "▁démo", -13.084924697875977 ], [ "▁MVP", -13.085041046142578 ], [ "▁replay", -13.085062026977539 ], [ "▁usoara", -13.08522891998291 ], [ "▁Breast", -13.085278511047363 ], [ "ooney", -13.085336685180664 ], [ "▁außen", -13.085663795471191 ], [ "▁Motorola", -13.085695266723633 ], [ "▁spalat", -13.08578109741211 ], [ "euillez", -13.086088180541992 ], [ "▁jeunesse", -13.086170196533203 ], [ "▁pastoral", -13.086174011230469 ], [ "▁Sussex", -13.086185455322266 ], [ "▁stencil", -13.08619213104248 ], [ "▁organismului", -13.086504936218262 ], [ "seized", -13.086649894714355 ], [ "▁întrebare", -13.086865425109863 ], [ "cliquez", -13.086874961853027 ], [ "5.7", -13.086984634399414 ], [ "▁Yama", -13.087080955505371 ], [ "painted", -13.08708667755127 ], [ "▁Swimming", -13.087176322937012 ], [ "Rhythm", -13.087202072143555 ], [ "▁sorrow", -13.087210655212402 ], [ "▁Movers", -13.08731460571289 ], [ "renforcer", -13.08735466003418 ], [ "▁Wach", -13.087381362915039 ], [ "0,00", -13.087390899658203 ], [ "▁glove", -13.08753490447998 ], [ "▁stâng", -13.087669372558594 ], [ "rgendwann", -13.087687492370605 ], [ "▁Philippine", -13.08769416809082 ], [ "▁anunțat", -13.087716102600098 ], [ "▁Coleman", -13.087723731994629 ], [ "affir", -13.087918281555176 ], [ "uleiul", -13.08808422088623 ], [ "▁Coconut", 
-13.088197708129883 ], [ "▁Supplement", -13.088210105895996 ], [ "haudiere", -13.088293075561523 ], [ "▁kettle", -13.088313102722168 ], [ "▁3,5", -13.088370323181152 ], [ "refurbished", -13.088425636291504 ], [ "esthétique", -13.088665962219238 ], [ "performing", -13.088667869567871 ], [ "▁Engag", -13.088762283325195 ], [ "Group", -13.088801383972168 ], [ "▁viande", -13.088887214660645 ], [ "▁oricum", -13.088888168334961 ], [ "Spitalul", -13.089093208312988 ], [ "▁cesse", -13.089110374450684 ], [ "▁contradiction", -13.089130401611328 ], [ "▁Chrysler", -13.089154243469238 ], [ "▁poultry", -13.089154243469238 ], [ "▁thirteen", -13.089154243469238 ], [ "▁sightseeing", -13.089155197143555 ], [ "▁Miguel", -13.089158058166504 ], [ "▁terminology", -13.089334487915039 ], [ "▁Genetic", -13.089553833007812 ], [ "commercial", -13.08963394165039 ], [ "gehoben", -13.08965015411377 ], [ "RIGHT", -13.08995532989502 ], [ "▁proprietate", -13.089990615844727 ], [ "▁Cannes", -13.090012550354004 ], [ "▁klicken", -13.090023040771484 ], [ "▁Belgique", -13.0901460647583 ], [ "tapped", -13.09034538269043 ], [ "kinetic", -13.090569496154785 ], [ "▁feuilles", -13.090673446655273 ], [ "whitening", -13.090760231018066 ], [ "Any", -13.090946197509766 ], [ "Manager", -13.091099739074707 ], [ "▁constatat", -13.091106414794922 ], [ "▁Myanmar", -13.091140747070312 ], [ "▁Examination", -13.091142654418945 ], [ "▁règle", -13.091208457946777 ], [ "▁umgesetzt", -13.09128475189209 ], [ "211", -13.091336250305176 ], [ "▁Herald", -13.091449737548828 ], [ "Alex", -13.091680526733398 ], [ "▁drauf", -13.091707229614258 ], [ "logger", -13.091714859008789 ], [ "▁pictur", -13.09186840057373 ], [ "▁Divi", -13.09196949005127 ], [ "▁furnizat", -13.092089653015137 ], [ "▁verzichten", -13.092132568359375 ], [ "▁Sergi", -13.092199325561523 ], [ "contaminated", -13.09223747253418 ], [ "▁Buddy", -13.092243194580078 ], [ "▁chilled", -13.092268943786621 ], [ "▁vorlieg", -13.092317581176758 ], [ "▁Claudia", -13.092632293701172 ], [ "▁miserable", -13.092653274536133 ], [ "▁sketches", -13.092683792114258 ], [ "schicken", -13.092814445495605 ], [ "since", -13.0928373336792 ], [ "2.9", -13.092840194702148 ], [ "▁sitzen", -13.092928886413574 ], [ "ceapa", -13.093396186828613 ], [ "respectarea", -13.093438148498535 ], [ "▁handheld", -13.093448638916016 ], [ "popular", -13.093527793884277 ], [ "calming", -13.093603134155273 ], [ "Govern", -13.093632698059082 ], [ "▁omega", -13.093645095825195 ], [ "▁Planner", -13.093791007995605 ], [ "enriched", -13.093850135803223 ], [ "154", -13.093976974487305 ], [ "▁autorisé", -13.093989372253418 ], [ "▁cadouri", -13.09407901763916 ], [ "▁vulnerabilities", -13.094143867492676 ], [ "▁Arbeitnehmer", -13.094158172607422 ], [ "éditeur", -13.094234466552734 ], [ "▁Anleitung", -13.094317436218262 ], [ "rubbing", -13.094343185424805 ], [ "▁autovehicul", -13.094621658325195 ], [ "▁öffnen", -13.094621658325195 ], [ "▁Napoleon", -13.094622611999512 ], [ "▁cliché", -13.094637870788574 ], [ "▁Schaf", -13.09469985961914 ], [ "regulating", -13.094894409179688 ], [ "▁Kühl", -13.09490966796875 ], [ "▁blush", -13.094913482666016 ], [ "▁discard", -13.094992637634277 ], [ "▁confine", -13.095027923583984 ], [ "▁Rodriguez", -13.09511947631836 ], [ "▁ADHD", -13.095165252685547 ], [ "▁Madame", -13.09516716003418 ], [ "▁résolution", -13.095319747924805 ], [ "▁flair", -13.095369338989258 ], [ "▁claw", -13.095422744750977 ], [ "▁1929", -13.095643043518066 ], [ "ETH", -13.095672607421875 ], [ "nähe", -13.095804214477539 ], [ "▁soothe", 
-13.0958251953125 ], [ "4.9", -13.095833778381348 ], [ "montée", -13.095925331115723 ], [ "confirming", -13.095989227294922 ], [ "continent", -13.09613037109375 ], [ "reiz", -13.09643840789795 ], [ "john", -13.096577644348145 ], [ "IONAL", -13.096588134765625 ], [ "▁exported", -13.0966215133667 ], [ "▁Prison", -13.096651077270508 ], [ "possessed", -13.096952438354492 ], [ "▁placebo", -13.096991539001465 ], [ "▁biodiversity", -13.097116470336914 ], [ "▁combustion", -13.097116470336914 ], [ "▁Plumbing", -13.09711742401123 ], [ "ixie", -13.097124099731445 ], [ "▁repetition", -13.09715461730957 ], [ "▁soumis", -13.097372055053711 ], [ "▁reduc", -13.097671508789062 ], [ "▁constrain", -13.097759246826172 ], [ "Anti", -13.097760200500488 ], [ "consolidated", -13.097817420959473 ], [ "214", -13.098095893859863 ], [ "▁breaches", -13.098108291625977 ], [ "infringement", -13.098115921020508 ], [ "▁drizzle", -13.098115921020508 ], [ "▁erhöhen", -13.098116874694824 ], [ "▁Somerset", -13.098118782043457 ], [ "▁blonde", -13.098132133483887 ], [ "▁Funny", -13.09813404083252 ], [ "tuşi", -13.098149299621582 ], [ "▁reinvent", -13.098162651062012 ], [ "▁sérieux", -13.098247528076172 ], [ "▁croire", -13.098308563232422 ], [ "general", -13.098315238952637 ], [ "▁Distance", -13.098319053649902 ], [ "▁VoIP", -13.098348617553711 ], [ "▁adăugat", -13.098406791687012 ], [ "matik", -13.098546028137207 ], [ "▁avatar", -13.098647117614746 ], [ "▁superstar", -13.098804473876953 ], [ "8.0", -13.098814010620117 ], [ "lusieurs", -13.098982810974121 ], [ "▁Judeţean", -13.099117279052734 ], [ "offenen", -13.099128723144531 ], [ "RAF", -13.099133491516113 ], [ "▁restroom", -13.099207878112793 ], [ "enfance", -13.099348068237305 ], [ "▁garnish", -13.099499702453613 ], [ "▁vermittelt", -13.099631309509277 ], [ "Histoire", -13.099634170532227 ], [ "cyan", -13.100628852844238 ], [ "Talk", -13.100666046142578 ], [ "▁Varianten", -13.10069465637207 ], [ "▁Lille", -13.10085678100586 ], [ "▁offenbar", -13.10098934173584 ], [ "▁rénovation", -13.10112190246582 ], [ "▁comentarii", -13.101249694824219 ], [ "▁Bedford", -13.10130500793457 ], [ "▁cercetări", -13.101325988769531 ], [ "▁précision", -13.101337432861328 ], [ "MRC", -13.101358413696289 ], [ "alterations", -13.101476669311523 ], [ "▁discours", -13.101531028747559 ], [ "äger", -13.101577758789062 ], [ "▁antreprenor", -13.101622581481934 ], [ "▁Oriental", -13.101849555969238 ], [ "conducerea", -13.101868629455566 ], [ "CBC", -13.101932525634766 ], [ "▁mince", -13.101985931396484 ], [ "▁presidency", -13.10212516784668 ], [ "▁lipstick", -13.102167129516602 ], [ "▁SERVICES", -13.102237701416016 ], [ "productive", -13.10237979888916 ], [ "Assad", -13.102400779724121 ], [ "▁efectiv", -13.102540969848633 ], [ "▁gestern", -13.102596282958984 ], [ "▁RGB", -13.102606773376465 ], [ "▁Transilvania", -13.102627754211426 ], [ "▁Raleigh", -13.102670669555664 ], [ "DOM", -13.102702140808105 ], [ "▁iesit", -13.102806091308594 ], [ "▁anuntat", -13.102810859680176 ], [ "▁automatiquement", -13.102901458740234 ], [ "▁proliferation", -13.103130340576172 ], [ "▁Maroc", -13.103156089782715 ], [ "▁prezenţ", -13.10323429107666 ], [ "▁Filipino", -13.103296279907227 ], [ "▁Traian", -13.103351593017578 ], [ "▁swimmer", -13.10356616973877 ], [ "▁Slovenia", -13.103632926940918 ], [ "phobia", -13.103724479675293 ], [ "curricular", -13.103734016418457 ], [ "jurnal", -13.103825569152832 ], [ "▁vorne", -13.103870391845703 ], [ "▁asuma", -13.103875160217285 ], [ "defended", -13.104104995727539 ], [ "▁imminent", 
-13.104140281677246 ], [ "favored", -13.10417366027832 ], [ "▁innovator", -13.104179382324219 ], [ "▁Salzburg", -13.104289054870605 ], [ "5.4", -13.104452133178711 ], [ "Safe", -13.104597091674805 ], [ "▁inteleg", -13.104744911193848 ], [ "▁charisma", -13.104781150817871 ], [ "nature", -13.104784965515137 ], [ "4.8", -13.104942321777344 ], [ "argues", -13.105104446411133 ], [ "▁dimensiune", -13.105142593383789 ], [ "▁subdivision", -13.105142593383789 ], [ "▁embarrassing", -13.105144500732422 ], [ "▁confuse", -13.105207443237305 ], [ "DIC", -13.105460166931152 ], [ "rubrique", -13.10549545288086 ], [ "dépendance", -13.105598449707031 ], [ "INCLUD", -13.10565185546875 ], [ "▁Griffin", -13.10574722290039 ], [ "157", -13.105751037597656 ], [ "▁revamp", -13.105839729309082 ], [ "▁umgehen", -13.10595989227295 ], [ "▁mențin", -13.106231689453125 ], [ "▁1937", -13.106695175170898 ], [ "eklagte", -13.106766700744629 ], [ "▁clientèle", -13.106801986694336 ], [ "▁campsite", -13.10708999633789 ], [ "▁florist", -13.107144355773926 ], [ "▁Ferguson", -13.107159614562988 ], [ "▁demolition", -13.107160568237305 ], [ "▁McCain", -13.107254981994629 ], [ "▁reckon", -13.10733413696289 ], [ "striped", -13.107414245605469 ], [ "▁sonore", -13.107481002807617 ], [ "migrated", -13.107548713684082 ], [ "▁fluorescent", -13.107664108276367 ], [ "▁Colegi", -13.107762336730957 ], [ "ianu", -13.107860565185547 ], [ "cruising", -13.107882499694824 ], [ "LINK", -13.107965469360352 ], [ "▁Cutting", -13.108001708984375 ], [ "ABILITY", -13.108168601989746 ], [ "▁Categories", -13.108168601989746 ], [ "▁erhoben", -13.108168601989746 ], [ "▁Cocktail", -13.108169555664062 ], [ "▁Generator", -13.108177185058594 ], [ "▁gesucht", -13.108186721801758 ], [ "▁telescope", -13.10818862915039 ], [ "KET", -13.108192443847656 ], [ "▁hilfreich", -13.108192443847656 ], [ "▁beneficiary", -13.108585357666016 ], [ "▁Winston", -13.108636856079102 ], [ "Auswirkungen", -13.108675956726074 ], [ "portrayed", -13.108705520629883 ], [ "▁Aspekte", -13.108743667602539 ], [ "ffected", -13.108901023864746 ], [ "eutic", -13.108905792236328 ], [ "International", -13.109021186828613 ], [ "attente", -13.109078407287598 ], [ "mentioning", -13.109119415283203 ], [ "launch", -13.109129905700684 ], [ "▁EURO", -13.109152793884277 ], [ "▁Fraser", -13.109344482421875 ], [ "▁Johannes", -13.109408378601074 ], [ "▁felicit", -13.109477043151855 ], [ "▁plâng", -13.109522819519043 ], [ "izant", -13.10971736907959 ], [ "▁reţe", -13.109846115112305 ], [ "Mech", -13.109954833984375 ], [ "▁algebra", -13.110193252563477 ], [ "▁surgeries", -13.110257148742676 ], [ "▁semifinal", -13.110262870788574 ], [ "▁intimidating", -13.110288619995117 ], [ "▁exkl", -13.110604286193848 ], [ "asigurarea", -13.110918998718262 ], [ "Tek", -13.111136436462402 ], [ "▁Einladung", -13.111205101013184 ], [ "▁similaire", -13.111205101013184 ], [ "▁bebelus", -13.111221313476562 ], [ "▁déclin", -13.111400604248047 ], [ "▁Console", -13.111495018005371 ], [ "RET", -13.111573219299316 ], [ "appli", -13.111586570739746 ], [ "45%", -13.111663818359375 ], [ "Evenimentul", -13.111811637878418 ], [ "sincerely", -13.111812591552734 ], [ "sammlung", -13.112098693847656 ], [ "Amérique", -13.112220764160156 ], [ "▁1919", -13.112326622009277 ], [ "regulation", -13.112367630004883 ], [ "gebäude", -13.112726211547852 ], [ "▁Perspektive", -13.112726211547852 ], [ "Espagne", -13.112744331359863 ], [ "▁Underground", -13.11283016204834 ], [ "secret", -13.112833976745605 ], [ "▁Aussicht", -13.112874031066895 ], [ "Photo", 
-13.112977027893066 ], [ "▁Brust", -13.113144874572754 ], [ "▁Sustainability", -13.11323356628418 ], [ "▁clădiri", -13.11323356628418 ], [ "▁librarian", -13.11323356628418 ], [ "▁HBO", -13.113235473632812 ], [ "▁Parallel", -13.113240242004395 ], [ "▁shimmer", -13.113283157348633 ], [ "▁schlicht", -13.113292694091797 ], [ "▁anticipat", -13.113311767578125 ], [ "▁foolish", -13.11335563659668 ], [ "▁Ability", -13.11347484588623 ], [ "▁ceremoni", -13.11358642578125 ], [ "▁Ablauf", -13.11359977722168 ], [ "icrobial", -13.113606452941895 ], [ "▁actiuni", -13.11362361907959 ], [ "▁Wilhelm", -13.113761901855469 ], [ "▁nennen", -13.113775253295898 ], [ "▁botez", -13.113832473754883 ], [ "Alpes", -13.113912582397461 ], [ "▁libér", -13.11392593383789 ], [ "▁sneakers", -13.114052772521973 ], [ "geschafft", -13.114252090454102 ], [ "▁downstairs", -13.114261627197266 ], [ "▁wrench", -13.114294052124023 ], [ "▁erheblich", -13.11442756652832 ], [ "▁alimentar", -13.114710807800293 ], [ "▁suger", -13.11474323272705 ], [ "analysis", -13.114883422851562 ], [ "öhn", -13.114891052246094 ], [ "▁Nantes", -13.114895820617676 ], [ "▁Arbor", -13.114899635314941 ], [ "ooze", -13.115150451660156 ], [ "▁facade", -13.115229606628418 ], [ "▁MySQL", -13.115266799926758 ], [ "▁Salvador", -13.115266799926758 ], [ "▁Schlafzimmer", -13.115279197692871 ], [ "▁autentic", -13.115320205688477 ], [ "▁prezint", -13.115348815917969 ], [ "▁campground", -13.115397453308105 ], [ "Query", -13.11540412902832 ], [ "bekannt", -13.115598678588867 ], [ "arcinia", -13.115632057189941 ], [ "▁stunt", -13.115825653076172 ], [ "▁informare", -13.115830421447754 ], [ "▁interzis", -13.11584186553955 ], [ "▁Burke", -13.115995407104492 ], [ "certified", -13.11601734161377 ], [ "▁clove", -13.11605167388916 ], [ "java", -13.116271018981934 ], [ "▁Vielfalt", -13.116284370422363 ], [ "gebung", -13.116329193115234 ], [ "▁9/11", -13.116497993469238 ], [ "▁disruptive", -13.11650562286377 ], [ "visual", -13.116693496704102 ], [ "▁anunţat", -13.11679458618164 ], [ "▁Plätze", -13.116799354553223 ], [ "▁reduceri", -13.116920471191406 ], [ "autorisation", -13.116950035095215 ], [ "▁ligament", -13.11705207824707 ], [ "▁învăța", -13.117081642150879 ], [ "läufig", -13.117303848266602 ], [ "▁Copenhagen", -13.117303848266602 ], [ "▁commodities", -13.117303848266602 ], [ "▁eindeutig", -13.117313385009766 ], [ "▁catheter", -13.117321014404297 ], [ "erklärung", -13.117720603942871 ], [ "▁intelectual", -13.117814064025879 ], [ "▁municipality", -13.117891311645508 ], [ "▁1936", -13.11798095703125 ], [ "rruption", -13.118217468261719 ], [ "▁Lafayette", -13.118324279785156 ], [ "▁berühmte", -13.118324279785156 ], [ "▁idylli", -13.118325233459473 ], [ "▁caldura", -13.118447303771973 ], [ "▁tablette", -13.118535995483398 ], [ "▁liquidity", -13.118728637695312 ], [ "NGOs", -13.118885040283203 ], [ "▁supliment", -13.11889934539795 ], [ "contact", -13.119075775146484 ], [ "lustig", -13.119219779968262 ], [ "▁watercolor", -13.119319915771484 ], [ "▁Tiffany", -13.119344711303711 ], [ "▁Glauben", -13.119365692138672 ], [ "Immobilie", -13.119406700134277 ], [ "▁stripped", -13.119549751281738 ], [ "▁Beatles", -13.119601249694824 ], [ "ани", -13.119770050048828 ], [ "▁lifespan", -13.119986534118652 ], [ "▁profondeur", -13.120251655578613 ], [ "▁durere", -13.120329856872559 ], [ "▁Lithuania", -13.120367050170898 ], [ "▁resurrection", -13.120367050170898 ], [ "▁suitcase", -13.120535850524902 ], [ "▁Plumber", -13.120545387268066 ], [ "criticized", -13.120595932006836 ], [ "feared", 
-13.120756149291992 ], [ "▁Aunt", -13.120929718017578 ], [ "otwithstanding", -13.121068000793457 ], [ "verständlich", -13.12115478515625 ], [ "fiber", -13.121248245239258 ], [ "headquartered", -13.121390342712402 ], [ "▁Perspective", -13.121391296386719 ], [ "▁semantic", -13.121413230895996 ], [ "VIEW", -13.121431350708008 ], [ "▁Ersatzteile", -13.121567726135254 ], [ "▁disgust", -13.121685981750488 ], [ "rrington", -13.121834754943848 ], [ "ässe", -13.121922492980957 ], [ "▁anerkannt", -13.121956825256348 ], [ "meaning", -13.12203598022461 ], [ "178", -13.122039794921875 ], [ "▁grupuri", -13.1221284866333 ], [ "ciones", -13.122267723083496 ], [ "▁Mobility", -13.122414588928223 ], [ "▁unstable", -13.122422218322754 ], [ "▁FULL", -13.122456550598145 ], [ "austausch", -13.122491836547852 ], [ "▁culminat", -13.122549057006836 ], [ "▁Roast", -13.122742652893066 ], [ "existant", -13.122940063476562 ], [ "167", -13.123008728027344 ], [ "tinerii", -13.123040199279785 ], [ "September", -13.123115539550781 ], [ "▁haircut", -13.123274803161621 ], [ "▁Tutorial", -13.123440742492676 ], [ "▁enquiries", -13.123440742492676 ], [ "▁livelihood", -13.123440742492676 ], [ "▁proficiency", -13.123440742492676 ], [ "▁pavement", -13.123443603515625 ], [ "▁Reservation", -13.123445510864258 ], [ "aimerai", -13.123491287231445 ], [ "▁laboratoire", -13.123492240905762 ], [ "leihen", -13.123501777648926 ], [ "ministerium", -13.123518943786621 ], [ "▁Concentr", -13.12366008758545 ], [ "▁swipe", -13.12368106842041 ], [ "extrêmement", -13.123687744140625 ], [ "cultivated", -13.123708724975586 ], [ "▁Converse", -13.123845100402832 ], [ "▁paycheck", -13.123863220214844 ], [ "olltest", -13.123995780944824 ], [ "▁Bauch", -13.124022483825684 ], [ "▁autobuz", -13.124067306518555 ], [ "attack", -13.124094009399414 ], [ "While", -13.124311447143555 ], [ "Retrouvez", -13.124320983886719 ], [ "▁Dolphin", -13.124466896057129 ], [ "▁Shelby", -13.124480247497559 ], [ "▁Diagnostic", -13.124486923217773 ], [ "▁reconcil", -13.124558448791504 ], [ "▁Iaşi", -13.124733924865723 ], [ "▁iubesc", -13.124979972839355 ], [ "▁Bestseller", -13.124985694885254 ], [ "▁antrenor", -13.125035285949707 ], [ "▁Imaging", -13.125089645385742 ], [ "▁priorité", -13.125295639038086 ], [ "▁brewery", -13.125494003295898 ], [ "▁residual", -13.125494003295898 ], [ "▁intermittent", -13.125494956970215 ], [ "Kollekt", -13.125585556030273 ], [ "▁Walsh", -13.12558650970459 ], [ "▁marvelous", -13.125653266906738 ], [ "canceled", -13.125686645507812 ], [ "174", -13.125761985778809 ], [ "normes", -13.125837326049805 ], [ "▁Tempo", -13.125996589660645 ], [ "▁Târgu", -13.126008987426758 ], [ "877", -13.126165390014648 ], [ "5-8", -13.126190185546875 ], [ "960", -13.126486778259277 ], [ "▁Scandinavia", -13.1265230178833 ], [ "▁prolific", -13.126526832580566 ], [ "lasi", -13.126916885375977 ], [ "glück", -13.127097129821777 ], [ "▁immersion", -13.127204895019531 ], [ "RSA", -13.127323150634766 ], [ "▁Polk", -13.127340316772461 ], [ "▁transmitter", -13.12747859954834 ], [ "▁Kleidung", -13.12755298614502 ], [ "▁Cosmo", -13.127676963806152 ], [ "▁1935", -13.127788543701172 ], [ "höhere", -13.127906799316406 ], [ "▁Tatsache", -13.128074645996094 ], [ "▁Outlet", -13.1282377243042 ], [ "▁canalisation", -13.12824821472168 ], [ "Mbps", -13.128433227539062 ], [ "▁skeptical", -13.128582954406738 ], [ "mplification", -13.128617286682129 ], [ "▁Advice", -13.128618240356445 ], [ "▁détaillé", -13.128676414489746 ], [ "660", -13.128701210021973 ], [ "▁eyebrow", -13.128722190856934 ], [ 
"▁HIGH", -13.128898620605469 ], [ "hnlich", -13.129073143005371 ], [ "▁depăș", -13.12910270690918 ], [ "▁procurori", -13.129140853881836 ], [ "▁refrain", -13.129212379455566 ], [ "▁geschaffen", -13.12952995300293 ], [ "justement", -13.129663467407227 ], [ "exposing", -13.129700660705566 ], [ "243", -13.1298828125 ], [ "sectorul", -13.130104064941406 ], [ "▁courrier", -13.130180358886719 ], [ "▁carcas", -13.130199432373047 ], [ "sitter", -13.13022518157959 ], [ "▁Schreiben", -13.130335807800293 ], [ "▁malfunction", -13.130358695983887 ], [ "poartă", -13.130522727966309 ], [ "raisons", -13.130565643310547 ], [ "▁HOT", -13.130650520324707 ], [ "▁refreshed", -13.130730628967285 ], [ "mânt", -13.130744934082031 ], [ "▁coefficient", -13.13097858428955 ], [ "▁instituţii", -13.131194114685059 ], [ "▁sanguin", -13.131202697753906 ], [ "▁ceci", -13.131213188171387 ], [ "▁garçon", -13.131232261657715 ], [ "deluxe", -13.131237030029297 ], [ "▁rectif", -13.131311416625977 ], [ "920", -13.131364822387695 ], [ "Exista", -13.131428718566895 ], [ "▁magnif", -13.131568908691406 ], [ "efficiencies", -13.131681442260742 ], [ "▁Mitsubishi", -13.131681442260742 ], [ "▁consortium", -13.131681442260742 ], [ "▁baggage", -13.131683349609375 ], [ "▁guild", -13.131736755371094 ], [ "▁sixty", -13.13193130493164 ], [ "▁Retreat", -13.13245677947998 ], [ "batting", -13.132473945617676 ], [ "470", -13.132708549499512 ], [ "▁Britanie", -13.132718086242676 ], [ "displaced", -13.132734298706055 ], [ "▁spați", -13.132794380187988 ], [ "▁exceptionnelle", -13.13281536102295 ], [ "▁authorize", -13.132906913757324 ], [ "▁prescribe", -13.133187294006348 ], [ "▁dépannage", -13.133234024047852 ], [ "▁sexuelle", -13.133234024047852 ], [ "valid", -13.133275032043457 ], [ "▁hymn", -13.133752822875977 ], [ "▁histories", -13.133757591247559 ], [ "▁oriunde", -13.133764266967773 ], [ "Pop", -13.133785247802734 ], [ "▁dispoziţi", -13.133800506591797 ], [ "ADI", -13.133819580078125 ], [ "Google", -13.133830070495605 ], [ "▁Autism", -13.133918762207031 ], [ "▁aggr", -13.134354591369629 ], [ "bleed", -13.134618759155273 ], [ "▁displacement", -13.13478946685791 ], [ "▁hobbies", -13.13478946685791 ], [ "▁anatomy", -13.134799003601074 ], [ "▁Klinik", -13.134821891784668 ], [ "▁CCTV", -13.1348237991333 ], [ "readable", -13.134886741638184 ], [ "ulph", -13.134982109069824 ], [ "metabol", -13.135035514831543 ], [ "▁rugăm", -13.135037422180176 ], [ "▁Scotia", -13.135087013244629 ], [ "▁Einheit", -13.135211944580078 ], [ "▁troupe", -13.13581371307373 ], [ "▁Practitioner", -13.135828018188477 ], [ "▁oarec", -13.135909080505371 ], [ "Appel", -13.135998725891113 ], [ "situația", -13.136096000671387 ], [ "▁Yemen", -13.136353492736816 ], [ "piping", -13.136515617370605 ], [ "blood", -13.136772155761719 ], [ "engraved", -13.136866569519043 ], [ "▁Cristina", -13.136866569519043 ], [ "▁inaccurate", -13.136866569519043 ], [ "savory", -13.136878967285156 ], [ "atism", -13.136919021606445 ], [ "▁dependency", -13.137007713317871 ], [ "▁assertion", -13.137015342712402 ], [ "▁intersect", -13.137201309204102 ], [ "DATA", -13.137224197387695 ], [ "▁britanic", -13.1373872756958 ], [ "▁sanitaire", -13.137393951416016 ], [ "▁PLUS", -13.137436866760254 ], [ "▁platter", -13.137730598449707 ], [ "▁reconsider", -13.137802124023438 ], [ "▁Swim", -13.13786792755127 ], [ "▁Scene", -13.137896537780762 ], [ "▁Reynolds", -13.137907028198242 ], [ "▁gesund", -13.137922286987305 ], [ "international", -13.137959480285645 ], [ "government", -13.13804817199707 ], [ "▁gemstone", 
-13.138052940368652 ], [ "▁reproductive", -13.1381196975708 ], [ "▁expressive", -13.13820743560791 ], [ "▁tranche", -13.13842487335205 ], [ "▁Niagara", -13.138427734375 ], [ "▁Studierende", -13.138434410095215 ], [ "▁crave", -13.138607025146484 ], [ "pathetic", -13.138739585876465 ], [ "▁1916", -13.138858795166016 ], [ "▁Thousand", -13.138873100280762 ], [ "uffed", -13.138893127441406 ], [ "▁Lancaster", -13.138960838317871 ], [ "▁revenge", -13.138972282409668 ], [ "▁melody", -13.1389741897583 ], [ "Suitable", -13.138991355895996 ], [ "▁beacon", -13.139082908630371 ], [ "▁MAY", -13.139205932617188 ], [ "livré", -13.139216423034668 ], [ "Virus", -13.139391899108887 ], [ "▁collaborator", -13.139413833618164 ], [ "produktion", -13.139480590820312 ], [ "▁iluminat", -13.139593124389648 ], [ "facets", -13.13975715637207 ], [ "▁expus", -13.139784812927246 ], [ "▁baptism", -13.13999080657959 ], [ "▁urgency", -13.140016555786133 ], [ "artery", -13.14030647277832 ], [ "▁eingeladen", -13.14043140411377 ], [ "▁entfernen", -13.14051342010498 ], [ "soaking", -13.140555381774902 ], [ "▁irré", -13.140557289123535 ], [ "▁purity", -13.140700340270996 ], [ "▁adăug", -13.140731811523438 ], [ "historischen", -13.140777587890625 ], [ "crezi", -13.140793800354004 ], [ "▁tarziu", -13.141035079956055 ], [ "▁Mozart", -13.141040802001953 ], [ "▁trimming", -13.141056060791016 ], [ "▁violat", -13.141056060791016 ], [ "▁Vermögen", -13.14108943939209 ], [ "▁Theorie", -13.141114234924316 ], [ "scheibe", -13.14114761352539 ], [ "Partidul", -13.141324996948242 ], [ "▁childcare", -13.14133071899414 ], [ "ajele", -13.141345977783203 ], [ "▁Punjab", -13.141390800476074 ], [ "6.3", -13.14156436920166 ], [ "▁recount", -13.141571044921875 ], [ "▁repel", -13.141799926757812 ], [ "vantage", -13.1419095993042 ], [ "6.4", -13.141953468322754 ], [ "▁comedian", -13.142087936401367 ], [ "▁snappe", -13.142256736755371 ], [ "PLE", -13.142271041870117 ], [ "▁rapper", -13.142439842224121 ], [ "▁Belfast", -13.142657279968262 ], [ "▁predictive", -13.14271068572998 ], [ "dépôt", -13.1427583694458 ], [ "flavored", -13.142769813537598 ], [ "chließlich", -13.14293098449707 ], [ "▁stump", -13.142955780029297 ], [ "▁lakh", -13.142963409423828 ], [ "3:30", -13.143021583557129 ], [ "▁cetățeni", -13.1431245803833 ], [ "▁Milliarden", -13.143125534057617 ], [ "Assurance", -13.143128395080566 ], [ "▁Marketplace", -13.143329620361328 ], [ "equipped", -13.143423080444336 ], [ "▁russe", -13.143462181091309 ], [ "Exactly", -13.143651008605957 ], [ "▁Venez", -13.144125938415527 ], [ "▁Pavilion", -13.144171714782715 ], [ "▁incontournable", -13.144171714782715 ], [ "▁slaughter", -13.14417839050293 ], [ "asteptam", -13.144190788269043 ], [ "▁Fighter", -13.144196510314941 ], [ "▁Landkreis", -13.144278526306152 ], [ "▁lumini", -13.144312858581543 ], [ "▁connaît", -13.144615173339844 ], [ "▁Breite", -13.144674301147461 ], [ "▁Disability", -13.144774436950684 ], [ "▁Alfa", -13.144786834716797 ], [ "▁poise", -13.144895553588867 ], [ "▁Alpen", -13.144898414611816 ], [ "betont", -13.145031929016113 ], [ "159", -13.145161628723145 ], [ "▁geprägt", -13.145219802856445 ], [ "▁intrigued", -13.145219802856445 ], [ "▁sympathy", -13.145220756530762 ], [ "societal", -13.145225524902344 ], [ "▁sédui", -13.145243644714355 ], [ "▁differentiation", -13.145384788513184 ], [ "▁aprobare", -13.145744323730469 ], [ "schirm", -13.14585018157959 ], [ "sagt", -13.145956039428711 ], [ "7.3", -13.146101951599121 ], [ "Bib", -13.146263122558594 ], [ "europäischen", -13.146268844604492 ], [ 
"▁Innovative", -13.146268844604492 ], [ "▁autonome", -13.146330833435059 ], [ "▁Objective", -13.146400451660156 ], [ "▁refusal", -13.146551132202148 ], [ "▁exposé", -13.146719932556152 ], [ "▁cetăţeni", -13.146793365478516 ], [ "▁stimmt", -13.146798133850098 ], [ "acordul", -13.147162437438965 ], [ "▁hormonal", -13.147254943847656 ], [ "intermédiaire", -13.147319793701172 ], [ "▁doubl", -13.147374153137207 ], [ "▁flute", -13.147509574890137 ], [ "▁Balkon", -13.147523880004883 ], [ "▁Florian", -13.147607803344727 ], [ "737", -13.147614479064941 ], [ "▁dritte", -13.147639274597168 ], [ "spitze", -13.147685050964355 ], [ "donnent", -13.14778995513916 ], [ "▁Zuhause", -13.147850036621094 ], [ "▁VIII", -13.147852897644043 ], [ "familien", -13.148151397705078 ], [ "▁sécurisé", -13.148313522338867 ], [ "▁glamour", -13.148370742797852 ], [ "▁societati", -13.148370742797852 ], [ "typique", -13.1483793258667 ], [ "▁addicted", -13.148421287536621 ], [ "▁Providence", -13.148500442504883 ], [ "▁Extended", -13.148506164550781 ], [ "▁Barbie", -13.148513793945312 ], [ "zustand", -13.148516654968262 ], [ "▁Sauna", -13.148638725280762 ], [ "▁propane", -13.148663520812988 ], [ "europa", -13.148894309997559 ], [ "glued", -13.148940086364746 ], [ "▁Mystery", -13.148941993713379 ], [ "▁travaillé", -13.149106979370117 ], [ "riol", -13.149251937866211 ], [ "fleisch", -13.149288177490234 ], [ "▁Eintritt", -13.149327278137207 ], [ "▁Syndrome", -13.149422645568848 ], [ "▁petroleum", -13.149426460266113 ], [ "▁genial", -13.149433135986328 ], [ "sponsored", -13.149436950683594 ], [ "▁Cindy", -13.149436950683594 ], [ "▁courier", -13.149600982666016 ], [ "▁Scrap", -13.149640083312988 ], [ "▁conţin", -13.149724006652832 ], [ "(2007)", -13.149764060974121 ], [ "▁gewährleisten", -13.149949073791504 ], [ "▁proprietor", -13.15011215209961 ], [ "▁cheque", -13.15046215057373 ], [ "maternity", -13.150477409362793 ], [ "▁Gustav", -13.15048599243164 ], [ "▁arterial", -13.150497436523438 ], [ "▁whiskey", -13.150510787963867 ], [ "▁concealed", -13.150525093078613 ], [ "thèque", -13.150553703308105 ], [ "felony", -13.150579452514648 ], [ "▁tweeted", -13.150613784790039 ], [ "OTA", -13.150619506835938 ], [ "nsel", -13.150664329528809 ], [ "▁coarse", -13.150664329528809 ], [ "▁identificat", -13.150707244873047 ], [ "▁variability", -13.150716781616211 ], [ "civ", -13.150843620300293 ], [ "▁drastic", -13.150956153869629 ], [ "▁hatred", -13.151090621948242 ], [ "▁Bürgermeister", -13.151237487792969 ], [ "▁utilizatorilor", -13.15124225616455 ], [ "OULD", -13.15137004852295 ], [ "rmaßen", -13.151383399963379 ], [ "▁windshield", -13.151530265808105 ], [ "▁Particular", -13.151531219482422 ], [ "▁Tunnel", -13.151638984680176 ], [ "▁litri", -13.15164852142334 ], [ "extrême", -13.15180492401123 ], [ "▁Schalt", -13.151944160461426 ], [ "paket", -13.152159690856934 ], [ "berlin", -13.152169227600098 ], [ "▁slujb", -13.152193069458008 ], [ "facilitated", -13.152206420898438 ], [ "Congressional", -13.152510643005371 ], [ "▁honeymoon", -13.152585983276367 ], [ "▁Provision", -13.152697563171387 ], [ "▁Outfit", -13.152779579162598 ], [ "udder", -13.152814865112305 ], [ "▁chandelier", -13.153002738952637 ], [ "donating", -13.153132438659668 ], [ "historic", -13.15333080291748 ], [ "organized", -13.153508186340332 ], [ "(8)", -13.15356731414795 ], [ "▁touristique", -13.153610229492188 ], [ "▁Roosevelt", -13.153643608093262 ], [ "▁Verständnis", -13.153643608093262 ], [ "▁prilej", -13.153655052185059 ], [ "Vanity", -13.153806686401367 ], [ "chilly", 
-13.153964042663574 ], [ "loyer", -13.154031753540039 ], [ "▁Zhang", -13.154053688049316 ], [ "▁Nouveau", -13.154193878173828 ], [ "Soft", -13.154326438903809 ], [ "▁motherboard", -13.15441608428955 ], [ "▁Erklärung", -13.154701232910156 ], [ "▁Tasmania", -13.154702186584473 ], [ "▁verändern", -13.154703140258789 ], [ "▁seldom", -13.154711723327637 ], [ "▁Karriere", -13.154714584350586 ], [ "▁Mixed", -13.154902458190918 ], [ "umfang", -13.154970169067383 ], [ "▁Strategies", -13.155035972595215 ], [ "CHAR", -13.155051231384277 ], [ "olitary", -13.155075073242188 ], [ "▁Persoan", -13.1550874710083 ], [ "bewegung", -13.155242919921875 ], [ "▁Ernest", -13.155367851257324 ], [ "withdrawn", -13.155855178833008 ], [ "▁stationary", -13.155881881713867 ], [ "▁bland", -13.155939102172852 ], [ "▁Replace", -13.156059265136719 ], [ "▁Londres", -13.156290054321289 ], [ "▁plural", -13.156290054321289 ], [ "▁concentrat", -13.156515121459961 ], [ "Maschine", -13.156675338745117 ], [ "▁Advocate", -13.156820297241211 ], [ "▁vermitteln", -13.156824111938477 ], [ "▁dispenser", -13.156827926635742 ], [ "▁tedious", -13.15695858001709 ], [ "▁Straight", -13.15705394744873 ], [ "▁Corona", -13.157061576843262 ], [ "▁monumental", -13.157073020935059 ], [ "▁migrate", -13.15720272064209 ], [ "▁verlieren", -13.157366752624512 ], [ "▁Lub", -13.157482147216797 ], [ "▁reinforcement", -13.157827377319336 ], [ "▁cherish", -13.157843589782715 ], [ "Veterinary", -13.157881736755371 ], [ "geschwindigkeit", -13.157881736755371 ], [ "▁féminin", -13.157881736755371 ], [ "▁Facilities", -13.157964706420898 ], [ "▁urmari", -13.158050537109375 ], [ "▁Vertical", -13.158098220825195 ], [ "echoe", -13.158188819885254 ], [ "toured", -13.158548355102539 ], [ "Served", -13.158772468566895 ], [ "más", -13.158853530883789 ], [ "license", -13.158893585205078 ], [ "misunderstanding", -13.158944129943848 ], [ "▁glamorous", -13.158944129943848 ], [ "BJP", -13.158973693847656 ], [ "▁découvert", -13.159173965454102 ], [ "schönsten", -13.159517288208008 ], [ "▁(2018)", -13.159577369689941 ], [ "▁orasului", -13.159581184387207 ], [ "328", -13.159674644470215 ], [ "thighs", -13.159801483154297 ], [ "éclairage", -13.160008430480957 ], [ "Oamenii", -13.160009384155273 ], [ "▁Transmission", -13.16014575958252 ], [ "▁transpir", -13.16015911102295 ], [ "▁președinte", -13.160321235656738 ], [ "finalists", -13.160327911376953 ], [ "genügend", -13.160524368286133 ], [ "▁Aufmerksamkeit", -13.160539627075195 ], [ "▁unglaublich", -13.160539627075195 ], [ "▁descarc", -13.160604476928711 ], [ "▁Couch", -13.160683631896973 ], [ "eaucoup", -13.160788536071777 ], [ "▁adidas", -13.161075592041016 ], [ "▁1-800-", -13.161077499389648 ], [ "▁Communities", -13.161102294921875 ], [ "▁Einkommen", -13.161102294921875 ], [ "▁Reagan", -13.16114330291748 ], [ "▁Stoke", -13.161260604858398 ], [ "▁Snapchat", -13.161269187927246 ], [ "éclat", -13.161272048950195 ], [ "▁auseinander", -13.161367416381836 ], [ "▁richesse", -13.16137409210205 ], [ "▁toggle", -13.161396026611328 ], [ "▁Zutaten", -13.161606788635254 ], [ "▁député", -13.16161060333252 ], [ "▁battlefield", -13.161611557006836 ], [ "▁spirituel", -13.161611557006836 ], [ "▁Shuttle", -13.161632537841797 ], [ "▁Aktien", -13.161665916442871 ], [ "hormon", -13.161819458007812 ], [ "connection", -13.16187858581543 ], [ "▁vizitatori", -13.16191577911377 ], [ "érité", -13.161971092224121 ], [ "truck", -13.1619873046875 ], [ "▁yourselves", -13.162139892578125 ], [ "▁Logistics", -13.162140846252441 ], [ "coveted", -13.16215705871582 
], [ "▁şedinţ", -13.162671089172363 ], [ "▁messenger", -13.162703514099121 ], [ "▁țar", -13.162918090820312 ], [ "▁Grau", -13.163025856018066 ], [ "chirurgie", -13.163138389587402 ], [ "▁Ressourcen", -13.16320514678955 ], [ "▁Jésus", -13.163207054138184 ], [ "▁acțiune", -13.163208961486816 ], [ "▁Bundesliga", -13.163249015808105 ], [ "Lizenz", -13.163379669189453 ], [ "ELLE", -13.163908958435059 ], [ "vraie", -13.1639986038208 ], [ "ruined", -13.164018630981445 ], [ "▁Marble", -13.164109230041504 ], [ "▁Zambia", -13.164308547973633 ], [ "▁Finnish", -13.164366722106934 ], [ "▁trackback", -13.164488792419434 ], [ "héros", -13.16451644897461 ], [ "▁réclam", -13.164534568786621 ], [ "locurile", -13.164706230163574 ], [ "tägliche", -13.164753913879395 ], [ "IFF", -13.164824485778809 ], [ "▁contextual", -13.164938926696777 ], [ "▁Elvis", -13.165084838867188 ], [ "▁Batch", -13.165183067321777 ], [ "▁appris", -13.16519546508789 ], [ "intensive", -13.165404319763184 ], [ "▁întâmplat", -13.16565990447998 ], [ "▁prelucr", -13.16576099395752 ], [ "flore", -13.165873527526855 ], [ "▁Alkohol", -13.165877342224121 ], [ "Konzern", -13.165895462036133 ], [ "Delete", -13.166082382202148 ], [ "öck", -13.16612720489502 ], [ "▁clientii", -13.16614818572998 ], [ "▁innovate", -13.166224479675293 ], [ "▁ASAP", -13.166345596313477 ], [ "crumbs", -13.166425704956055 ], [ "reusable", -13.166489601135254 ], [ "▁Beaver", -13.166507720947266 ], [ "▁rosii", -13.166643142700195 ], [ "Arr", -13.166704177856445 ], [ "▁Zubehör", -13.166948318481445 ], [ "▁stolz", -13.166952133178711 ], [ "▁$75", -13.16695499420166 ], [ "▁Frühling", -13.166967391967773 ], [ "▁disagreement", -13.166988372802734 ], [ "▁formulate", -13.167381286621094 ], [ "braking", -13.167522430419922 ], [ "▁submarine", -13.167535781860352 ], [ "▁identificare", -13.167652130126953 ], [ "lansarea", -13.167659759521484 ], [ "covered", -13.167753219604492 ], [ "benso", -13.167859077453613 ], [ "▁situatie", -13.167989730834961 ], [ "hilf", -13.1681547164917 ], [ "▁Southampton", -13.168557167053223 ], [ "▁intéressé", -13.168557167053223 ], [ "▁congressional", -13.168572425842285 ], [ "65%", -13.168595314025879 ], [ "▁Allison", -13.168627738952637 ], [ "Mainland", -13.168726921081543 ], [ "▁touchscreen", -13.16882038116455 ], [ "leitet", -13.168922424316406 ], [ "mnului", -13.16958999633789 ], [ "▁engagiert", -13.169631004333496 ], [ "joacă", -13.16964340209961 ], [ "▁$5,000", -13.169652938842773 ], [ "upscale", -13.1697359085083 ], [ "▁vérité", -13.16983413696289 ], [ "flüssig", -13.170167922973633 ], [ "Richtlinie", -13.170169830322266 ], [ "▁positif", -13.170169830322266 ], [ "▁diferenta", -13.170175552368164 ], [ "▁întâi", -13.170707702636719 ], [ "ethylene", -13.170791625976562 ], [ "kreuz", -13.170913696289062 ], [ "Surely", -13.170990943908691 ], [ "puneti", -13.171002388000488 ], [ "europe", -13.171142578125 ], [ "▁comunist", -13.171271324157715 ], [ "unterricht", -13.171302795410156 ], [ "▁Füll", -13.171304702758789 ], [ "▁Aberdeen", -13.171792030334473 ], [ "▁DSLR", -13.171792030334473 ], [ "▁functioneaza", -13.171799659729004 ], [ "▁benches", -13.171807289123535 ], [ "▁Alpine", -13.171866416931152 ], [ "phthal", -13.172003746032715 ], [ "▁counselling", -13.17219066619873 ], [ "▁erzielen", -13.172323226928711 ], [ "▁părinţi", -13.172329902648926 ], [ "▁besitzen", -13.17236614227295 ], [ "heavenly", -13.172389030456543 ], [ "▁masque", -13.17281723022461 ], [ "▁Legislature", -13.172859191894531 ], [ "▁Recycling", -13.172861099243164 ], [ "▁Derma", 
-13.172883987426758 ], [ "reunite", -13.172926902770996 ], [ "recettes", -13.17310619354248 ], [ "converge", -13.173262596130371 ], [ "▁compoziti", -13.17327880859375 ], [ "▁Nürnberg", -13.173398971557617 ], [ "760", -13.173545837402344 ], [ "▁entière", -13.173674583435059 ], [ "▁parchment", -13.173944473266602 ], [ "▁Aufwand", -13.173945426940918 ], [ "▁antivirus", -13.174087524414062 ], [ "▁remettr", -13.17409610748291 ], [ "▁NEVER", -13.174243927001953 ], [ "▁restrictive", -13.174266815185547 ], [ "▁beurre", -13.174283027648926 ], [ "▁frigider", -13.174478530883789 ], [ "acquisition", -13.174642562866211 ], [ "▁Correct", -13.174866676330566 ], [ "▁immortal", -13.175017356872559 ], [ "▁occupancy", -13.175017356872559 ], [ "▁Tucson", -13.175019264221191 ], [ "▁Dhabi", -13.175025939941406 ], [ "obligation", -13.175033569335938 ], [ "▁warfare", -13.175037384033203 ], [ "▁syntax", -13.175045013427734 ], [ "APS", -13.175106048583984 ], [ "мен", -13.175209999084473 ], [ "▁diferenț", -13.175251960754395 ], [ "wordpress", -13.17549991607666 ], [ "▁Wohnzimmer", -13.175593376159668 ], [ "oppo", -13.175736427307129 ], [ "▁miscare", -13.175762176513672 ], [ "companiilor", -13.17581558227539 ], [ "▁bezahlt", -13.17584228515625 ], [ "Sterne", -13.175864219665527 ], [ "inability", -13.175898551940918 ], [ "▁Hoffnung", -13.176156044006348 ], [ "▁românească", -13.176176071166992 ], [ "document", -13.176177024841309 ], [ "borrowers", -13.17625904083252 ], [ "▁rasa", -13.176301956176758 ], [ "▁bénéfice", -13.176445960998535 ], [ "▁Panda", -13.17645263671875 ], [ "▁cărţi", -13.176730155944824 ], [ "▁Vorgehen", -13.17690658569336 ], [ "▁afecteaz", -13.176956176757812 ], [ "▁diagnos", -13.177050590515137 ], [ "▁Dentistry", -13.177180290222168 ], [ "▁staggering", -13.177180290222168 ], [ "präsident", -13.177181243896484 ], [ "▁vocational", -13.177239418029785 ], [ "Combined", -13.177287101745605 ], [ "stère", -13.177306175231934 ], [ "▁frunze", -13.177478790283203 ], [ "OLI", -13.177525520324707 ], [ "▁răc", -13.177752494812012 ], [ "▁changé", -13.177754402160645 ], [ "▁reprezentanți", -13.177757263183594 ], [ "▁ausgeschlossen", -13.177777290344238 ], [ "Windows", -13.177891731262207 ], [ "sometimes", -13.177898406982422 ], [ "▁dargestellt", -13.178120613098145 ], [ "provoking", -13.178263664245605 ], [ "terribly", -13.178264617919922 ], [ "▁speculate", -13.178274154663086 ], [ "▁complément", -13.178305625915527 ], [ "▁(2006)", -13.178306579589844 ], [ "zulegen", -13.178668022155762 ], [ "▁définitive", -13.178876876831055 ], [ "considerare", -13.17911148071289 ], [ "▁Subaru", -13.179354667663574 ], [ "WAN", -13.179390907287598 ], [ "guessed", -13.179417610168457 ], [ "spannung", -13.179479598999023 ], [ "▁supernatural", -13.179515838623047 ], [ "▁Interstate", -13.17957878112793 ], [ "▁redundant", -13.179891586303711 ], [ "▁HUG", -13.179893493652344 ], [ "▁restauration", -13.180006980895996 ], [ "repute", -13.180011749267578 ], [ "coagul", -13.180028915405273 ], [ "tehnologia", -13.18043327331543 ], [ "warded", -13.180444717407227 ], [ "▁lobster", -13.180469512939453 ], [ "▁Hafen", -13.180542945861816 ], [ "▁Guess", -13.18056583404541 ], [ "seraient", -13.181038856506348 ], [ "▁trench", -13.181156158447266 ], [ "▁piept", -13.181283950805664 ], [ "categorized", -13.181396484375 ], [ "softer", -13.1815185546875 ], [ "▁feasibility", -13.181519508361816 ], [ "▁restructuring", -13.181519508361816 ], [ "▁GOOD", -13.181537628173828 ], [ "▁inspiré", -13.181610107421875 ], [ "▁spéci", -13.18163013458252 ], [ 
"▁Mattress", -13.181686401367188 ], [ "▁biologique", -13.181702613830566 ], [ "▁Crema", -13.182043075561523 ], [ "▁korrekt", -13.182063102722168 ], [ "▁imperfect", -13.182205200195312 ], [ "▁advantageous", -13.182329177856445 ], [ "9.00", -13.182390213012695 ], [ "PAL", -13.182557106018066 ], [ "▁Illustration", -13.182607650756836 ], [ "▁Katherine", -13.182607650756836 ], [ "▁cervical", -13.182607650756836 ], [ "▁hectic", -13.182611465454102 ], [ "▁Belastung", -13.182615280151367 ], [ "▁Laguna", -13.182628631591797 ], [ "▁Burton", -13.182761192321777 ], [ "nettoyage", -13.182875633239746 ], [ "Toward", -13.183072090148926 ], [ "continuare", -13.183072090148926 ], [ "▁acumulat", -13.183106422424316 ], [ "▁déposé", -13.183216094970703 ], [ "▁prestige", -13.183269500732422 ], [ "▁LNG", -13.183525085449219 ], [ "▁Dacia", -13.183662414550781 ], [ "▁concede", -13.183691024780273 ], [ "▁reconciliation", -13.183822631835938 ], [ "Sistemul", -13.183877944946289 ], [ "Speed", -13.183937072753906 ], [ "▁Implant", -13.183977127075195 ], [ "▁möchtest", -13.184020042419434 ], [ "▁Norton", -13.184064865112305 ], [ "▁cosmic", -13.184181213378906 ], [ "enregistrement", -13.184247016906738 ], [ "țării", -13.18433952331543 ], [ "Veröffentlichung", -13.184786796569824 ], [ "erlebnis", -13.184786796569824 ], [ "▁Carpenter", -13.184786796569824 ], [ "▁INFORMATION", -13.184786796569824 ], [ "invites", -13.18481731414795 ], [ "▁gewan", -13.1849365234375 ], [ "▁réservé", -13.184986114501953 ], [ "▁aquatic", -13.184988021850586 ], [ "▁Seoul", -13.18507194519043 ], [ "▁älter", -13.185185432434082 ], [ "▁classmates", -13.185223579406738 ], [ "gelangen", -13.185253143310547 ], [ "▁Camill", -13.185285568237305 ], [ "simo", -13.185291290283203 ], [ "▁dormitor", -13.185333251953125 ], [ "wahren", -13.185354232788086 ], [ "▁incremental", -13.185357093811035 ], [ "▁caci", -13.185494422912598 ], [ "mittlere", -13.185752868652344 ], [ "▁condominium", -13.185877799987793 ], [ "▁rainforest", -13.185877799987793 ], [ "▁championnat", -13.185891151428223 ], [ "▁interrupted", -13.185921669006348 ], [ "▁tactile", -13.185930252075195 ], [ "▁unconditional", -13.185945510864258 ], [ "▁reactive", -13.186041831970215 ], [ "▁Stretch", -13.1861572265625 ], [ "▁serene", -13.18624210357666 ], [ "570", -13.186318397521973 ], [ "igte", -13.186376571655273 ], [ "Louis", -13.186410903930664 ], [ "▁Mittelpunkt", -13.186493873596191 ], [ "EEP", -13.18651294708252 ], [ "▁vault", -13.186552047729492 ], [ "absolu", -13.186893463134766 ], [ "▁solidarity", -13.186971664428711 ], [ "CLICK", -13.18708324432373 ], [ "▁hustle", -13.187090873718262 ], [ "▁microscope", -13.187105178833008 ], [ "▁Recommended", -13.187111854553223 ], [ "âche", -13.18716812133789 ], [ "▁flashlight", -13.187286376953125 ], [ "modificarea", -13.18754768371582 ], [ "izaţi", -13.18773078918457 ], [ "planned", -13.187899589538574 ], [ "Download", -13.187906265258789 ], [ "▁gourmand", -13.188064575195312 ], [ "▁subsidiaries", -13.188064575195312 ], [ "orthodox", -13.188135147094727 ], [ "▁Auburn", -13.188323020935059 ], [ "▁exprimat", -13.188336372375488 ], [ "procédé", -13.18861198425293 ], [ "▁ressenti", -13.188648223876953 ], [ "▁stint", -13.188678741455078 ], [ "Essentially", -13.189072608947754 ], [ "▁Savior", -13.189164161682129 ], [ "▁Flood", -13.189168930053711 ], [ "▁neurological", -13.189249038696289 ], [ "▁strig", -13.189340591430664 ], [ "scended", -13.189421653747559 ], [ "▁Shiva", -13.189483642578125 ], [ "▁Sketch", -13.189544677734375 ], [ "▁monarch", 
-13.18956184387207 ], [ "▁Preview", -13.189632415771484 ], [ "▁bewegt", -13.189811706542969 ], [ "mapped", -13.189818382263184 ], [ "énorme", -13.189962387084961 ], [ "▁définition", -13.189963340759277 ], [ "▁nécessité", -13.189984321594238 ], [ "▁antren", -13.190027236938477 ], [ "▁Infant", -13.190072059631348 ], [ "▁incumbent", -13.190255165100098 ], [ "▁pavilion", -13.190255165100098 ], [ "▁Taliban", -13.19025707244873 ], [ "Easily", -13.19025993347168 ], [ "▁verteilt", -13.19030475616455 ], [ "▁Biblical", -13.190320014953613 ], [ "Christian", -13.190333366394043 ], [ "județul", -13.190436363220215 ], [ "Learning", -13.19046688079834 ], [ "▁Expand", -13.19054126739502 ], [ "▁Attach", -13.19056224822998 ], [ "consideră", -13.190573692321777 ], [ "einsatz", -13.190574645996094 ], [ "Numai", -13.190585136413574 ], [ "▁Eintrag", -13.190597534179688 ], [ "▁üblich", -13.190607070922852 ], [ "▁cumpără", -13.19062614440918 ], [ "escaped", -13.190693855285645 ], [ "▁Ortodox", -13.190804481506348 ], [ "▁obţinut", -13.190805435180664 ], [ "ecluded", -13.191036224365234 ], [ "▁brownie", -13.191089630126953 ], [ "▁regulament", -13.191253662109375 ], [ "▁Chaos", -13.191302299499512 ], [ "▁masiv", -13.19132137298584 ], [ "▁Gerald", -13.191376686096191 ], [ "▁Sigur", -13.191380500793457 ], [ "▁wavelength", -13.191380500793457 ], [ "▁retiring", -13.191396713256836 ], [ "▁exactement", -13.191819190979004 ], [ "ntino", -13.191823959350586 ], [ "▁Krebs", -13.19194221496582 ], [ "▁monatlich", -13.191956520080566 ], [ "▁aranj", -13.192011833190918 ], [ "▁priveşt", -13.192099571228027 ], [ "▁mecanic", -13.192109107971191 ], [ "money", -13.192233085632324 ], [ "parliamentary", -13.1922607421875 ], [ "▁probation", -13.192427635192871 ], [ "embroidered", -13.192451477050781 ], [ "▁amenajat", -13.192451477050781 ], [ "▁remnant", -13.192451477050781 ], [ "▁senzati", -13.192472457885742 ], [ "▁Declaration", -13.192483901977539 ], [ "farbe", -13.192506790161133 ], [ "▁skinny", -13.19260311126709 ], [ "Energi", -13.192648887634277 ], [ "verhältnisse", -13.19288158416748 ], [ "Recruit", -13.192972183227539 ], [ "frying", -13.193161010742188 ], [ "925", -13.193294525146484 ], [ "nstruire", -13.193302154541016 ], [ "toasted", -13.193424224853516 ], [ "▁nicotine", -13.193551063537598 ], [ "recessed", -13.193570137023926 ], [ "▁dialect", -13.193572044372559 ], [ "▁confisc", -13.193575859069824 ], [ "▁bubbl", -13.193643569946289 ], [ "▁Precision", -13.193682670593262 ], [ "▁sollicit", -13.193842887878418 ], [ "▁Moral", -13.193977355957031 ], [ "▁renseignements", -13.194112777709961 ], [ "UMP", -13.194116592407227 ], [ "ijn", -13.194183349609375 ], [ "▁fermeture", -13.194320678710938 ], [ "▁blueprint", -13.19462776184082 ], [ "▁groceries", -13.194652557373047 ], [ "möbel", -13.194655418395996 ], [ "▁Plenty", -13.194657325744629 ], [ "▁forfeit", -13.194719314575195 ], [ "méthodes", -13.194915771484375 ], [ "paving", -13.19493293762207 ], [ "outheastern", -13.194979667663574 ], [ "▁Overview", -13.19503116607666 ], [ "▁observers", -13.195171356201172 ], [ "▁Timișoara", -13.19520378112793 ], [ "noticing", -13.195332527160645 ], [ "▁Owl", -13.195381164550781 ], [ "▁1925", -13.195517539978027 ], [ "▁prüfen", -13.195755004882812 ], [ "▁Bewohner", -13.195756912231445 ], [ "▁Latvia", -13.195770263671875 ], [ "▁Tuscan", -13.19577407836914 ], [ "▁apprenticeship", -13.195789337158203 ], [ "▁courteous", -13.1958646774292 ], [ "adult", -13.196023941040039 ], [ "Licensed", -13.196029663085938 ], [ "abused", -13.196762084960938 ], [ 
"confidence", -13.19678020477295 ], [ "▁revolt", -13.196782112121582 ], [ "conference", -13.196861267089844 ], [ "genoss", -13.196914672851562 ], [ "▁răni", -13.196944236755371 ], [ "▁Intervention", -13.196949005126953 ], [ "▁primesc", -13.196969985961914 ], [ "trays", -13.197041511535645 ], [ "nozzle", -13.197216033935547 ], [ "▁splitting", -13.197443962097168 ], [ "▁könne", -13.197507858276367 ], [ "▁peisaj", -13.197943687438965 ], [ "▁academia", -13.197962760925293 ], [ "▁chakra", -13.197979927062988 ], [ "▁Abdul", -13.1981201171875 ], [ "▁Beschreibung", -13.198225021362305 ], [ "Regeln", -13.19831371307373 ], [ "eezy", -13.198314666748047 ], [ "▁problématique", -13.198515892028809 ], [ "▁Ausführung", -13.198524475097656 ], [ "▁reconnect", -13.19868278503418 ], [ "▁telefonic", -13.198966026306152 ], [ "▁Ethereum", -13.199069023132324 ], [ "▁Winnipeg", -13.199069023132324 ], [ "▁misconception", -13.199069023132324 ], [ "▁Verpackung", -13.199070930480957 ], [ "▁erzeugt", -13.199097633361816 ], [ "▁Identity", -13.199104309082031 ], [ "▁dunkle", -13.199109077453613 ], [ "sustaining", -13.19916820526123 ], [ "▁pereche", -13.199178695678711 ], [ "▁neîn", -13.199239730834961 ], [ "directorul", -13.199291229248047 ], [ "▁élabor", -13.199584007263184 ], [ "▁Hollow", -13.19960880279541 ], [ "▁getestet", -13.199751853942871 ], [ "▁Promote", -13.199797630310059 ], [ "agriculture", -13.199920654296875 ], [ "▁deosebir", -13.199934005737305 ], [ "▁neam", -13.199999809265137 ], [ "aufbau", -13.200042724609375 ], [ "▁susținut", -13.200079917907715 ], [ "fueled", -13.200119018554688 ], [ "▁impresionant", -13.200177192687988 ], [ "innate", -13.20026969909668 ], [ "grenzt", -13.200340270996094 ], [ "rescued", -13.200514793395996 ], [ "bestand", -13.200559616088867 ], [ "▁adjunct", -13.200729370117188 ], [ "▁Mischung", -13.200754165649414 ], [ "▁Lease", -13.201258659362793 ], [ "espagnol", -13.201284408569336 ], [ "▁Kickstarter", -13.201284408569336 ], [ "▁buzunar", -13.201284408569336 ], [ "▁buddies", -13.20129108428955 ], [ "käufe", -13.201485633850098 ], [ "cevoir", -13.201582908630371 ], [ "▁creşte", -13.201675415039062 ], [ "▁Cluster", -13.201825141906738 ], [ "▁obișnui", -13.201838493347168 ], [ "▁cassette", -13.201889038085938 ], [ "▁optisch", -13.201947212219238 ], [ "manned", -13.20200252532959 ], [ "schneid", -13.202362060546875 ], [ "Württemberg", -13.202393531799316 ], [ "shredded", -13.202393531799316 ], [ "▁botanical", -13.20239543914795 ], [ "characterization", -13.202445983886719 ], [ "▁Durchführung", -13.202452659606934 ], [ "▁tireless", -13.20250129699707 ], [ "lässlich", -13.20254135131836 ], [ "▁Merchant", -13.202570915222168 ], [ "joutez", -13.20259952545166 ], [ "▁amélior", -13.202676773071289 ], [ "fixed", -13.202741622924805 ], [ "kho", -13.202760696411133 ], [ "▁televizor", -13.202948570251465 ], [ "▁Davies", -13.202964782714844 ], [ "enceinte", -13.203118324279785 ], [ "▁Panorama", -13.20350456237793 ], [ "▁maternal", -13.203507423400879 ], [ "diversified", -13.203513145446777 ], [ "▁Jü", -13.203570365905762 ], [ "▁naz", -13.203730583190918 ], [ "▁plonge", -13.2039213180542 ], [ "geschickt", -13.203944206237793 ], [ "MIS", -13.204215049743652 ], [ "ragged", -13.204553604125977 ], [ "▁diarrhea", -13.20461654663086 ], [ "▁tsunami", -13.20461654663086 ], [ "▁Nikola", -13.204625129699707 ], [ "▁festivities", -13.20464038848877 ], [ "potting", -13.20479965209961 ], [ "▁telefonisch", -13.204874038696289 ], [ "TAR", -13.204971313476562 ], [ "▁schimbări", -13.205023765563965 ], [ 
"▁occidental", -13.205172538757324 ], [ "schloss", -13.205179214477539 ], [ "Print", -13.205284118652344 ], [ "▁autoritățil", -13.205361366271973 ], [ "idos", -13.20556640625 ], [ "mediocr", -13.20559310913086 ], [ "▁Decla", -13.205686569213867 ], [ "▁Elliott", -13.205729484558105 ], [ "▁pinpoint", -13.205734252929688 ], [ "▁disciple", -13.20579719543457 ], [ "▁Cairo", -13.2058744430542 ], [ "▁15-20", -13.2059326171875 ], [ "▁limbaj", -13.20611572265625 ], [ "▁retenu", -13.206154823303223 ], [ "▁Blüte", -13.20628833770752 ], [ "▁MINI", -13.206467628479004 ], [ "▁lumină", -13.206567764282227 ], [ "▁flawed", -13.206846237182617 ], [ "▁Belarus", -13.207067489624023 ], [ "Totul", -13.207207679748535 ], [ "hôte", -13.207273483276367 ], [ "▁verbringen", -13.207315444946289 ], [ "▁simultaneous", -13.207344055175781 ], [ "▁competiți", -13.207402229309082 ], [ "▁lancement", -13.207413673400879 ], [ "▁proprietati", -13.207432746887207 ], [ "▁angajator", -13.207465171813965 ], [ "▁ignorant", -13.207674026489258 ], [ "▁indicative", -13.207700729370117 ], [ "▁Bearbeitung", -13.207961082458496 ], [ "▁Ungaria", -13.207961082458496 ], [ "▁Sfint", -13.208015441894531 ], [ "▁Trojan", -13.20804214477539 ], [ "▁1911", -13.208100318908691 ], [ "▁reliabl", -13.2081937789917 ], [ "6-0", -13.20827865600586 ], [ "obst", -13.208523750305176 ], [ "▁relève", -13.208579063415527 ], [ "▁standpoint", -13.208874702453613 ], [ "ridden", -13.208918571472168 ], [ "▁Pdf", -13.209005355834961 ], [ "tatewide", -13.209051132202148 ], [ "Water", -13.209062576293945 ], [ "▁Pricing", -13.209089279174805 ], [ "▁protecţi", -13.209168434143066 ], [ "November", -13.209615707397461 ], [ "▁televiziune", -13.20964241027832 ], [ "Sodium", -13.209881782531738 ], [ "douceur", -13.209942817687988 ], [ "▁Flasche", -13.210183143615723 ], [ "3.9", -13.210193634033203 ], [ "▁electromagnetic", -13.210195541381836 ], [ "▁mitochondria", -13.210195541381836 ], [ "Suddenly", -13.210199356079102 ], [ "▁Drupal", -13.210201263427734 ], [ "▁supraveghere", -13.210211753845215 ], [ "▁cornea", -13.210288047790527 ], [ "räumt", -13.210309982299805 ], [ "▁healed", -13.210410118103027 ], [ "Roc", -13.210649490356445 ], [ "▁temporar", -13.210707664489746 ], [ "▁amaze", -13.210770606994629 ], [ "▁confrunta", -13.210833549499512 ], [ "Afterward", -13.210836410522461 ], [ "▁festgelegt", -13.21084213256836 ], [ "▁Kuchen", -13.210844993591309 ], [ "▁perpetual", -13.210858345031738 ], [ "systematically", -13.211000442504883 ], [ "▁coloan", -13.211006164550781 ], [ "▁extensi", -13.211058616638184 ], [ "▁Județean", -13.211315155029297 ], [ "▁amelior", -13.211315155029297 ], [ "▁illustrator", -13.211315155029297 ], [ "▁titanium", -13.211344718933105 ], [ "SMEs", -13.211384773254395 ], [ "taxable", -13.211578369140625 ], [ "▁Borough", -13.211607933044434 ], [ "verlust", -13.211772918701172 ], [ "ductive", -13.21233081817627 ], [ "▁Küste", -13.212335586547852 ], [ "▁végétal", -13.212410926818848 ], [ "▁breastfeeding", -13.212435722351074 ], [ "▁captivating", -13.212435722351074 ], [ "▁Chevy", -13.212443351745605 ], [ "▁aerospace", -13.212469100952148 ], [ "pozitia", -13.213095664978027 ], [ "Tutor", -13.213199615478516 ], [ "▁spum", -13.213312149047852 ], [ "curând", -13.213419914245605 ], [ "iscus", -13.213458061218262 ], [ "October", -13.213495254516602 ], [ "▁Reparatur", -13.213557243347168 ], [ "▁Servicii", -13.213574409484863 ], [ "▁Gonz", -13.21357536315918 ], [ "▁cybersecurity", -13.21357536315918 ], [ "▁UCLA", -13.213678359985352 ], [ "rissa", -13.213835716247559 
], [ "▁Kemp", -13.213850021362305 ], [ "▁piston", -13.214046478271484 ], [ "▁révèle", -13.214118957519531 ], [ "▁posséd", -13.21412181854248 ], [ "▁versehen", -13.214129447937012 ], [ "▁scrutin", -13.214226722717285 ], [ "donnant", -13.21436882019043 ], [ "▁Geschwindigkeit", -13.214680671691895 ], [ "▁Panasonic", -13.214680671691895 ], [ "audio", -13.214700698852539 ], [ "▁Packaging", -13.214771270751953 ], [ "phra", -13.2147798538208 ], [ "▁Letzte", -13.214954376220703 ], [ "insicht", -13.215141296386719 ], [ "▁sammeln", -13.215243339538574 ], [ "▁extins", -13.215259552001953 ], [ "▁collège", -13.215266227722168 ], [ "ancies", -13.215343475341797 ], [ "▁întâlnit", -13.215350151062012 ], [ "▁Servi", -13.215392112731934 ], [ "stattet", -13.215493202209473 ], [ "▁abstraction", -13.215566635131836 ], [ "▁candidature", -13.215592384338379 ], [ "ONU", -13.215676307678223 ], [ "▁raffle", -13.215826988220215 ], [ "▁Soldier", -13.215834617614746 ], [ "▁stipulate", -13.215883255004883 ], [ "▁vizual", -13.215950012207031 ], [ "lucht", -13.216007232666016 ], [ "▁circus", -13.216068267822266 ], [ "▁decree", -13.216259002685547 ], [ "immeuble", -13.216367721557617 ], [ "Store", -13.216426849365234 ], [ "randul", -13.216622352600098 ], [ "▁narration", -13.216933250427246 ], [ "implication", -13.216958045959473 ], [ "▁discontinued", -13.216971397399902 ], [ "▁Pilates", -13.216989517211914 ], [ "▁biais", -13.21701431274414 ], [ "panel", -13.217325210571289 ], [ "▁mower", -13.217458724975586 ], [ "▁Castro", -13.21753978729248 ], [ "pregătire", -13.217641830444336 ], [ "▁denomination", -13.218062400817871 ], [ "▁throttle", -13.21806526184082 ], [ "▁finition", -13.218086242675781 ], [ "▁clarification", -13.218286514282227 ], [ "laut", -13.218366622924805 ], [ "▁wastewater", -13.2184419631958 ], [ "▁Sanchez", -13.218770980834961 ], [ "▁Umfeld", -13.2189359664917 ], [ "▁consili", -13.218997955322266 ], [ "extrait", -13.219013214111328 ], [ "ionism", -13.2190523147583 ], [ "▁Cannabis", -13.219186782836914 ], [ "▁misconduct", -13.219186782836914 ], [ "▁shepherd", -13.219186782836914 ], [ "▁feminist", -13.21919059753418 ], [ "▁criterii", -13.219212532043457 ], [ "America", -13.219219207763672 ], [ "▁Telephone", -13.219270706176758 ], [ "▁Fritz", -13.219438552856445 ], [ "▁cheltui", -13.219794273376465 ], [ "▁Übung", -13.219857215881348 ], [ "făcută", -13.22006893157959 ], [ "▁străzi", -13.220170021057129 ], [ "influencing", -13.220315933227539 ], [ "▁Democracy", -13.220321655273438 ], [ "atorium", -13.220376014709473 ], [ "▁Stufe", -13.220465660095215 ], [ "▁Cornell", -13.220660209655762 ], [ "zugehen", -13.22074031829834 ], [ "▁coton", -13.220804214477539 ], [ "▁beinhaltet", -13.220881462097168 ], [ "▁kritisch", -13.220884323120117 ], [ "▁Kalender", -13.22105884552002 ], [ "▁Teig", -13.221253395080566 ], [ "cooked", -13.221264839172363 ], [ "▁diversité", -13.221390724182129 ], [ "recognizable", -13.221446990966797 ], [ "▁Dictionary", -13.221446990966797 ], [ "attribution", -13.22145938873291 ], [ "▁Teresa", -13.221471786499023 ], [ "▁Ahmad", -13.221487998962402 ], [ "HAM", -13.221627235412598 ], [ "▁floss", -13.221668243408203 ], [ "génie", -13.2218599319458 ], [ "▁Espa", -13.221989631652832 ], [ "hersteller", -13.221993446350098 ], [ "Musée", -13.222001075744629 ], [ "▁Crawford", -13.222579002380371 ], [ "▁Phantom", -13.222579002380371 ], [ "▁Jenkins", -13.222640037536621 ], [ "genauer", -13.222774505615234 ], [ "▁acţiuni", -13.222885131835938 ], [ "▁meciuri", -13.22322940826416 ], [ "▁verstärkt", 
-13.22326374053955 ], [ "▁troop", -13.22341251373291 ], [ "räder", -13.223483085632324 ], [ "Putting", -13.223536491394043 ], [ "NASDAQ", -13.223712921142578 ], [ "▁Buddhism", -13.223712921142578 ], [ "▁Religious", -13.223712921142578 ], [ "▁accommodating", -13.223712921142578 ], [ "▁lendemain", -13.223712921142578 ], [ "▁plywood", -13.223714828491211 ], [ "▁inflatable", -13.223724365234375 ], [ "▁sèche", -13.223731994628906 ], [ "▁fragil", -13.223845481872559 ], [ "▁Filip", -13.224115371704102 ], [ "▁Terrace", -13.224274635314941 ], [ "Biblio", -13.22432804107666 ], [ "resides", -13.22448444366455 ], [ "▁varf", -13.22451114654541 ], [ "Bildern", -13.224528312683105 ], [ "loß", -13.224685668945312 ], [ "555", -13.224702835083008 ], [ "▁astounding", -13.224847793579102 ], [ "▁brillant", -13.224857330322266 ], [ "▁Railroad", -13.224871635437012 ], [ "minimizing", -13.224907875061035 ], [ "▁Benedict", -13.225019454956055 ], [ "▁$400", -13.225068092346191 ], [ "▁schematic", -13.225217819213867 ], [ "Canada", -13.225371360778809 ], [ "▁psihic", -13.225415229797363 ], [ "▁avertiz", -13.225497245788574 ], [ "▁Breed", -13.225550651550293 ], [ "▁gradina", -13.225606918334961 ], [ "▁Liege", -13.225822448730469 ], [ "▁Retirement", -13.225983619689941 ], [ "▁pergola", -13.226005554199219 ], [ "▁Kuwait", -13.2260103225708 ], [ "▁logistic", -13.22629451751709 ], [ "▁captive", -13.22651481628418 ], [ "prepared", -13.226568222045898 ], [ "▁prononc", -13.226568222045898 ], [ "Celui", -13.226676940917969 ], [ "deutschland", -13.227120399475098 ], [ "▁devreme", -13.227124214172363 ], [ "▁părți", -13.227270126342773 ], [ "▁1934", -13.227517127990723 ], [ "▁ersetzt", -13.227560997009277 ], [ "▁frightening", -13.227689743041992 ], [ "▁fiecărui", -13.227819442749023 ], [ "correct", -13.22799015045166 ], [ "6.6", -13.228057861328125 ], [ "▁Manitoba", -13.228259086608887 ], [ "Chartered", -13.228416442871094 ], [ "▁părăs", -13.228543281555176 ], [ "Powered", -13.228697776794434 ], [ "impede", -13.22876262664795 ], [ "agonist", -13.22878646850586 ], [ "▁stratégique", -13.228829383850098 ], [ "▁vigilant", -13.228830337524414 ], [ "faceted", -13.228930473327637 ], [ "available", -13.229308128356934 ], [ "▁Promise", -13.229388236999512 ], [ "▁humorous", -13.229446411132812 ], [ "treibt", -13.229449272155762 ], [ "▁Patrol", -13.229514122009277 ], [ "huh", -13.229523658752441 ], [ "ztlich", -13.229804039001465 ], [ "▁rejet", -13.2299165725708 ], [ "odeur", -13.229935646057129 ], [ "usziehbar", -13.22996997833252 ], [ "▁gespannt", -13.229972839355469 ], [ "church", -13.230018615722656 ], [ "▁Popescu", -13.230109214782715 ], [ "▁einmalig", -13.230518341064453 ], [ "diluted", -13.230551719665527 ], [ "lighted", -13.231070518493652 ], [ "▁stattfinden", -13.23111343383789 ], [ "▁Reaktion", -13.231183052062988 ], [ "▁délivr", -13.23134994506836 ], [ "▁Helfer", -13.231407165527344 ], [ "Fiind", -13.23142147064209 ], [ "rmând", -13.231507301330566 ], [ "▁Beweis", -13.231671333312988 ], [ "▁Violet", -13.231733322143555 ], [ "kamera", -13.231764793395996 ], [ "▁Romney", -13.231779098510742 ], [ "▁Bradford", -13.231800079345703 ], [ "stellbar", -13.231852531433105 ], [ "▁roadmap", -13.231921195983887 ], [ "▁subconscious", -13.23204231262207 ], [ "contrasting", -13.232138633728027 ], [ "mécanisme", -13.232254981994629 ], [ "kämpft", -13.232255935668945 ], [ "▁Preston", -13.232719421386719 ], [ "▁Anliegen", -13.232802391052246 ], [ "▁necessities", -13.232827186584473 ], [ "▁detrimental", -13.232828140258789 ], [ "▁sprawl", 
-13.232830047607422 ], [ "▁Erfüllung", -13.23287582397461 ], [ "▁massacre", -13.2329683303833 ], [ "▁pietre", -13.232987403869629 ], [ "▁situații", -13.233027458190918 ], [ "vêtement", -13.233080863952637 ], [ "Listed", -13.233144760131836 ], [ "▁extravagant", -13.233399391174316 ], [ "▁axle", -13.233525276184082 ], [ "OTT", -13.233663558959961 ], [ "wildly", -13.233744621276855 ], [ "70,000", -13.233797073364258 ], [ "▁chauffeur", -13.23384952545166 ], [ "▁Brasov", -13.233972549438477 ], [ "▁Fähigkeiten", -13.233972549438477 ], [ "▁staatlich", -13.234025001525879 ], [ "outlines", -13.234034538269043 ], [ "▁aufmerksam", -13.234545707702637 ], [ "▁Relation", -13.234749794006348 ], [ "▁Stephan", -13.234947204589844 ], [ "yland", -13.23494815826416 ], [ "proclaimed", -13.235086441040039 ], [ "Wallet", -13.235100746154785 ], [ "verarbeitung", -13.235118865966797 ], [ "▁überraschen", -13.235118865966797 ], [ "▁Injury", -13.235125541687012 ], [ "▁horsepower", -13.235237121582031 ], [ "▁Tropical", -13.23523998260498 ], [ "▁wives", -13.235459327697754 ], [ "adherence", -13.235677719116211 ], [ "schätzung", -13.235692977905273 ], [ "▁coherent", -13.235708236694336 ], [ "parlament", -13.23574161529541 ], [ "▁stup", -13.235852241516113 ], [ "▁resonance", -13.23626708984375 ], [ "▁inheritance", -13.236355781555176 ], [ "commenced", -13.23645305633545 ], [ "▁supervise", -13.236475944519043 ], [ "▁facilitator", -13.236488342285156 ], [ "fares", -13.236678123474121 ], [ "▁Tibet", -13.23672866821289 ], [ "communication", -13.236787796020508 ], [ "yog", -13.236806869506836 ], [ "▁WLAN", -13.236842155456543 ], [ "▁Chili", -13.23685073852539 ], [ "▁Harold", -13.2369966506958 ], [ "▁Guerre", -13.237005233764648 ], [ "▁Femme", -13.237146377563477 ], [ "▁Lisbon", -13.237231254577637 ], [ "▁mulțumi", -13.237415313720703 ], [ "▁vorbereitet", -13.237415313720703 ], [ "▁aperture", -13.237422943115234 ], [ "▁Universities", -13.237442016601562 ], [ "▁reckless", -13.237471580505371 ], [ "▁Botschaft", -13.237533569335938 ], [ "▁Squad", -13.238022804260254 ], [ "▁buoy", -13.238061904907227 ], [ "participarea", -13.238236427307129 ], [ "stiinta", -13.238389015197754 ], [ "▁repeal", -13.238415718078613 ], [ "drilled", -13.238489151000977 ], [ "▁Conversation", -13.238567352294922 ], [ "▁subsid", -13.238615036010742 ], [ "anstalt", -13.238741874694824 ], [ "faktor", -13.23874282836914 ], [ "▁swamp", -13.238790512084961 ], [ "pflichtig", -13.238921165466309 ], [ "▁camion", -13.238970756530762 ], [ "▁gouvern", -13.239032745361328 ], [ "▁archaeological", -13.239141464233398 ], [ "▁glitch", -13.239198684692383 ], [ "average", -13.239294052124023 ], [ "▁coffre", -13.239481925964355 ], [ "▁Insert", -13.239513397216797 ], [ "▁colonne", -13.2395601272583 ], [ "▁Assess", -13.23962116241455 ], [ "▁batches", -13.239716529846191 ], [ "▁ammunition", -13.239717483520508 ], [ "▁scissors", -13.239717483520508 ], [ "▁Locksmith", -13.239740371704102 ], [ "▁Bollywood", -13.239991188049316 ], [ "expédi", -13.240288734436035 ], [ "▁descendants", -13.24039363861084 ], [ "▁unwilling", -13.240506172180176 ], [ "▁Noise", -13.240649223327637 ], [ "▁Directive", -13.240660667419434 ], [ "ATOR", -13.240765571594238 ], [ "▁Rajasthan", -13.240870475769043 ], [ "▁chaotic", -13.240888595581055 ], [ "▁NEED", -13.24093246459961 ], [ "▁părere", -13.24095344543457 ], [ "▁begonnen", -13.241448402404785 ], [ "▁Reef", -13.241504669189453 ], [ "▁vorgesehen", -13.24161434173584 ], [ "▁allocate", -13.241826057434082 ], [ "▁exceptionnel", -13.241936683654785 ], [ 
"▁gefertigt", -13.24203872680664 ], [ "fading", -13.242072105407715 ], [ "▁interpersonal", -13.242178916931152 ], [ "▁occupie", -13.242204666137695 ], [ "▁Teatr", -13.242579460144043 ], [ "▁kilomètres", -13.242603302001953 ], [ "▁verbinden", -13.242608070373535 ], [ "▁Frucht", -13.242643356323242 ], [ "augmented", -13.242720603942871 ], [ "▁twentieth", -13.243181228637695 ], [ "▁aggression", -13.243183135986328 ], [ "▁Miracle", -13.243184089660645 ], [ "▁peninsula", -13.243184089660645 ], [ "▁Fernando", -13.243185043334961 ], [ "▁autorităţil", -13.243203163146973 ], [ "▁Iisus", -13.243217468261719 ], [ "▁puck", -13.243423461914062 ], [ "titel", -13.243454933166504 ], [ "▁remake", -13.243562698364258 ], [ "freiheit", -13.243563652038574 ], [ "▁Belize", -13.243590354919434 ], [ "▁secundar", -13.243779182434082 ], [ "▁perpetrat", -13.243786811828613 ], [ "jedenfalls", -13.243797302246094 ], [ "linked", -13.243820190429688 ], [ "▁dégag", -13.243918418884277 ], [ "LAY", -13.243926048278809 ], [ "behandlung", -13.244172096252441 ], [ "▁1928", -13.244193077087402 ], [ "▁Nickel", -13.244205474853516 ], [ "rophy", -13.244256973266602 ], [ "▁autonomy", -13.244338989257812 ], [ "▁Treffen", -13.244402885437012 ], [ "▁groundbreaking", -13.24445915222168 ], [ "politisch", -13.244484901428223 ], [ "▁Vector", -13.244553565979004 ], [ "oricine", -13.244684219360352 ], [ "utilisées", -13.244684219360352 ], [ "plete", -13.244771003723145 ], [ "droht", -13.244918823242188 ], [ "▁alternativ", -13.245104789733887 ], [ "▁Bernie", -13.245213508605957 ], [ "▁embellish", -13.245260238647461 ], [ "▁Curriculum", -13.24549674987793 ], [ "herrscht", -13.245525360107422 ], [ "escalier", -13.246126174926758 ], [ "hian", -13.246333122253418 ], [ "ertaining", -13.246387481689453 ], [ "hitter", -13.246430397033691 ], [ "▁kompetente", -13.24665641784668 ], [ "▁trekking", -13.246760368347168 ], [ "EACH", -13.246841430664062 ], [ "▁Bedien", -13.2470703125 ], [ "starred", -13.247169494628906 ], [ "▁săptămâna", -13.247236251831055 ], [ "▁Gratuit", -13.247239112854004 ], [ "▁Jahrzehnte", -13.247241020202637 ], [ "ingénieur", -13.24731731414795 ], [ "▁Huang", -13.24736213684082 ], [ "Music", -13.247401237487793 ], [ "misiei", -13.247544288635254 ], [ "▁masuri", -13.247733116149902 ], [ "▁Achievement", -13.247817039489746 ], [ "▁Dorothy", -13.247817039489746 ], [ "blätter", -13.247817993164062 ], [ "éloign", -13.247817993164062 ], [ "▁Anglia", -13.247990608215332 ], [ "brach", -13.248013496398926 ], [ "▁Optimization", -13.248085021972656 ], [ "6.7", -13.248170852661133 ], [ "winkel", -13.248210906982422 ], [ "contenan", -13.248347282409668 ], [ "Astăzi", -13.248398780822754 ], [ "wiped", -13.248441696166992 ], [ "granting", -13.248665809631348 ], [ "▁plăti", -13.248859405517578 ], [ "▁Compensation", -13.248979568481445 ], [ "▁Verkäufer", -13.248979568481445 ], [ "▁angajați", -13.248980522155762 ], [ "▁diminished", -13.24902057647705 ], [ "employment", -13.249250411987305 ], [ "yahoo", -13.249435424804688 ], [ "▁détrui", -13.249698638916016 ], [ "▁suffisant", -13.24982738494873 ], [ "▁Moldovei", -13.250144004821777 ], [ "▁Pokemon", -13.250144004821777 ], [ "▁Malcolm", -13.250144958496094 ], [ "▁mysteries", -13.250147819519043 ], [ "▁Diversity", -13.250149726867676 ], [ "▁clinique", -13.250327110290527 ], [ "landais", -13.250344276428223 ], [ "▁campanii", -13.250399589538574 ], [ "▁témoignage", -13.250439643859863 ], [ "▁paralel", -13.250467300415039 ], [ "▁travailleurs", -13.250576972961426 ], [ "▁salvage", -13.250580787658691 ], [ 
"▁crayon", -13.250732421875 ], [ "immédiat", -13.25085163116455 ], [ "hopped", -13.250958442687988 ], [ "▁senzor", -13.25102710723877 ], [ "▁imbunatati", -13.251073837280273 ], [ "▁capitalize", -13.2511568069458 ], [ "▁Elephant", -13.25130844116211 ], [ "▁insomnia", -13.25131607055664 ], [ "▁Ansicht", -13.251325607299805 ], [ "▁lupte", -13.251556396484375 ], [ "▁genomic", -13.251557350158691 ], [ "▁Grape", -13.251769065856934 ], [ "MONT", -13.25197982788086 ], [ "métiers", -13.252004623413086 ], [ "▁Pierce", -13.252123832702637 ], [ "consulted", -13.252388954162598 ], [ "▁Responsible", -13.252474784851074 ], [ "symmetry", -13.252476692199707 ], [ "▁sulfur", -13.252487182617188 ], [ "▁înapoi", -13.252510070800781 ], [ "▁Junction", -13.252549171447754 ], [ "▁trilogy", -13.252622604370117 ], [ "▁unkompliziert", -13.253059387207031 ], [ "▁zugänglich", -13.253059387207031 ], [ "▁préfèr", -13.253153800964355 ], [ "oarelor", -13.253361701965332 ], [ "langage", -13.253460884094238 ], [ "admired", -13.253589630126953 ], [ "platform", -13.253595352172852 ], [ "▁pluralit", -13.253616333007812 ], [ "▁betrachtet", -13.253643035888672 ], [ "▁reproduc", -13.253790855407715 ], [ "exemple", -13.25385570526123 ], [ "▁conspir", -13.254347801208496 ], [ "▁pelvi", -13.25437068939209 ], [ "leased", -13.254551887512207 ], [ "▁souffle", -13.254570960998535 ], [ "▁approprié", -13.254705429077148 ], [ "absorbing", -13.254817962646484 ], [ "dividing", -13.254855155944824 ], [ "herently", -13.255147933959961 ], [ "▁blister", -13.255179405212402 ], [ "löst", -13.255182266235352 ], [ "Apotheke", -13.255398750305176 ], [ "▁Asociaţi", -13.255424499511719 ], [ "education", -13.255904197692871 ], [ "▁retract", -13.255982398986816 ], [ "▁appraise", -13.255990982055664 ], [ "▁Debbie", -13.256075859069824 ], [ "▁arhitect", -13.256193161010742 ], [ "▁Mohamed", -13.256568908691406 ], [ "▁îndrept", -13.256568908691406 ], [ "▁exhaustive", -13.256753921508789 ], [ "▁Notebook", -13.257004737854004 ], [ "crashing", -13.257068634033203 ], [ "▁Betreiber", -13.257155418395996 ], [ "▁présidentielle", -13.257159233093262 ], [ "▁Träger", -13.257172584533691 ], [ "▁noteworthy", -13.257259368896484 ], [ "▁séparé", -13.257729530334473 ], [ "▁doppelt", -13.257795333862305 ], [ "tină", -13.258066177368164 ], [ "Quelques", -13.258085250854492 ], [ "culoarea", -13.258100509643555 ], [ "▁ethic", -13.258166313171387 ], [ "▁cohesive", -13.258329391479492 ], [ "▁congratulations", -13.258334159851074 ], [ "▁sovereignty", -13.25833797454834 ], [ "▁Aplica", -13.258413314819336 ], [ "▁Covenant", -13.25851058959961 ], [ "▁multicultural", -13.258591651916504 ], [ "assemblée", -13.258955001831055 ], [ "▁petals", -13.258974075317383 ], [ "erode", -13.259026527404785 ], [ "▁porumb", -13.259035110473633 ], [ "▁Barrier", -13.259050369262695 ], [ "▁WWE", -13.259085655212402 ], [ "Etwa", -13.259175300598145 ], [ "▁recunosc", -13.259271621704102 ], [ "▁turtle", -13.259415626525879 ], [ "▁vârf", -13.259444236755371 ], [ "▁Ranking", -13.259448051452637 ], [ "▁sympathetic", -13.259514808654785 ], [ "exploded", -13.2595796585083 ], [ "▁influenț", -13.259591102600098 ], [ "▁Fireplace", -13.25972843170166 ], [ "▁Nachwuchs", -13.260090827941895 ], [ "▁empfohlen", -13.260090827941895 ], [ "Voir", -13.260661125183105 ], [ "▁Vimeo", -13.26069164276123 ], [ "▁weaving", -13.260967254638672 ], [ "beneficiar", -13.261198043823242 ], [ "▁balade", -13.261216163635254 ], [ "▁Mercy", -13.261566162109375 ], [ "3.000", -13.26181697845459 ], [ "Immediately", -13.261857032775879 ], [ 
"▁frosting", -13.261868476867676 ], [ "▁Fiscal", -13.261882781982422 ], [ "downloadable", -13.26188850402832 ], [ "▁Hwy", -13.261902809143066 ], [ "évoluer", -13.261951446533203 ], [ "▁vieille", -13.2620210647583 ], [ "heißen", -13.262436866760254 ], [ "▁étrangère", -13.262446403503418 ], [ "▁incapable", -13.262490272521973 ], [ "volunteered", -13.262520790100098 ], [ "fortunately", -13.262564659118652 ], [ "company", -13.262738227844238 ], [ "denkt", -13.2627592086792 ], [ "▁citesc", -13.262818336486816 ], [ "▁intrebare", -13.262896537780762 ], [ "pleasantly", -13.262990951538086 ], [ "▁Minecraft", -13.263079643249512 ], [ "▁Schmuck", -13.26308536529541 ], [ "▁maghiar", -13.263099670410156 ], [ "conductive", -13.263339042663574 ], [ "décrit", -13.263534545898438 ], [ "provide", -13.26353931427002 ], [ "▁depăş", -13.263628959655762 ], [ "ituated", -13.263657569885254 ], [ "▁trumpet", -13.264216423034668 ], [ "▁nastere", -13.2642240524292 ], [ "▁Région", -13.264245986938477 ], [ "Occupational", -13.264411926269531 ], [ "▁Grecia", -13.264415740966797 ], [ "▁Conclusion", -13.26449203491211 ], [ "▁collaborateurs", -13.264927864074707 ], [ "▁Alibaba", -13.265398025512695 ], [ "▁amplasat", -13.265398979187012 ], [ "▁Plastik", -13.265992164611816 ], [ "▁stash", -13.266023635864258 ], [ "▁Bonnie", -13.266045570373535 ], [ "▁ehrlich", -13.266156196594238 ], [ "▁contention", -13.266193389892578 ], [ "▁Oslo", -13.266263008117676 ], [ "englische", -13.266319274902344 ], [ "measurable", -13.266439437866211 ], [ "loppy", -13.266470909118652 ], [ "▁Refrigerat", -13.266579627990723 ], [ "▁remboursement", -13.266580581665039 ], [ "▁societăţi", -13.266580581665039 ], [ "translates", -13.266607284545898 ], [ "ichtigkeit", -13.266685485839844 ], [ "agentur", -13.266741752624512 ], [ "▁compute", -13.266800880432129 ], [ "berater", -13.266921043395996 ], [ "▁Georgetown", -13.266945838928223 ], [ "wolves", -13.266951560974121 ], [ "ceased", -13.266959190368652 ], [ "▁Binary", -13.267030715942383 ], [ "▁kontrolliert", -13.267172813415527 ], [ "informer", -13.267416000366211 ], [ "lehrer", -13.267578125 ], [ "lieferung", -13.267709732055664 ], [ "▁definit", -13.267742156982422 ], [ "chèque", -13.267765045166016 ], [ "▁clergy", -13.267765045166016 ], [ "▁ministries", -13.267767906188965 ], [ "▁plague", -13.267779350280762 ], [ "▁Jedi", -13.267805099487305 ], [ "▁Blackjack", -13.268025398254395 ], [ "▁subsection", -13.26807689666748 ], [ "▁Sachsen", -13.268121719360352 ], [ "valorile", -13.268146514892578 ], [ "molded", -13.26816463470459 ], [ "▁betroffen", -13.268183708190918 ], [ "▁adecvat", -13.268229484558105 ], [ "▁collègue", -13.26835823059082 ], [ "▁chinez", -13.268392562866211 ], [ "emelle", -13.268695831298828 ], [ "▁körperliche", -13.268902778625488 ], [ "▁titan", -13.26891040802002 ], [ "▁sophistication", -13.268951416015625 ], [ "▁provoke", -13.268957138061523 ], [ "▁pensii", -13.269042015075684 ], [ "▁Tucker", -13.269377708435059 ], [ "▁motoare", -13.26943302154541 ], [ "supported", -13.269536972045898 ], [ "▁Sicil", -13.269697189331055 ], [ "▁Ausgangs", -13.26987361907959 ], [ "▁verletzt", -13.269908905029297 ], [ "Ligue", -13.269996643066406 ], [ "▁organizatori", -13.270026206970215 ], [ "▁apprentice", -13.270099639892578 ], [ "▁Potato", -13.270183563232422 ], [ "▁Duft", -13.27039623260498 ], [ "▁medicament", -13.270566940307617 ], [ "Hôtel", -13.270740509033203 ], [ "▁Triangle", -13.270842552185059 ], [ "buted", -13.271100044250488 ], [ "▁Bentley", -13.271336555480957 ], [ "următoarele", 
-13.271389961242676 ], [ "animate", -13.271404266357422 ], [ "megapixel", -13.271404266357422 ], [ "einfachen", -13.271514892578125 ], [ "▁performanț", -13.271544456481934 ], [ "lurry", -13.27184009552002 ], [ "suffisamment", -13.27192211151123 ], [ "▁Weihnachten", -13.27192211151123 ], [ "▁Detective", -13.27194595336914 ], [ "▁lovit", -13.272049903869629 ], [ "▁blouse", -13.27213191986084 ], [ "▁hartie", -13.272163391113281 ], [ "vro", -13.27225112915039 ], [ "▁disastrous", -13.272517204284668 ], [ "vermutlich", -13.2725191116333 ], [ "▁Stafford", -13.272527694702148 ], [ "ehlt", -13.272628784179688 ], [ "▁vielseitig", -13.272643089294434 ], [ "Manifest", -13.273274421691895 ], [ "homage", -13.27354907989502 ], [ "menée", -13.273566246032715 ], [ "▁erläuter", -13.27370834350586 ], [ "▁volontaire", -13.273709297180176 ], [ "wrought", -13.27371597290039 ], [ "▁Naples", -13.273719787597656 ], [ "recommending", -13.273759841918945 ], [ "▁thermique", -13.273774147033691 ], [ "▁subtitle", -13.273787498474121 ], [ "▁Slam", -13.273809432983398 ], [ "▁necesitate", -13.273809432983398 ], [ "trimmed", -13.274099349975586 ], [ "urmatoarele", -13.274178504943848 ], [ "▁Sorin", -13.274245262145996 ], [ "▁compromis", -13.274300575256348 ], [ "overcoming", -13.274477005004883 ], [ "▁Samantha", -13.274901390075684 ], [ "dazzling", -13.27490234375 ], [ "▁Pearson", -13.274903297424316 ], [ "▁glazing", -13.274911880493164 ], [ "Revelation", -13.274921417236328 ], [ "destinée", -13.275156021118164 ], [ "öffnet", -13.27515983581543 ], [ "CERT", -13.275327682495117 ], [ "▁Sneak", -13.275503158569336 ], [ "proiectele", -13.275605201721191 ], [ "▁longitudinal", -13.27609634399414 ], [ "▁cocaine", -13.276098251342773 ], [ "▁universitar", -13.276108741760254 ], [ "▁refreshments", -13.276166915893555 ], [ "▁instanţ", -13.276243209838867 ], [ "▁kostenfrei", -13.276397705078125 ], [ "▁comédie", -13.276451110839844 ], [ "▁Locat", -13.276725769042969 ], [ "▁Albania", -13.276732444763184 ], [ "▁mécanique", -13.276776313781738 ], [ "messung", -13.27683162689209 ], [ "issus", -13.277260780334473 ], [ "pinned", -13.277328491210938 ], [ "▁sanft", -13.277335166931152 ], [ "▁geprüft", -13.277435302734375 ], [ "▁procè", -13.277442932128906 ], [ "▁Üb", -13.277765274047852 ], [ "5-0", -13.277802467346191 ], [ "▁Catering", -13.277957916259766 ], [ "▁prosperous", -13.27801513671875 ], [ "▁replication", -13.278098106384277 ], [ "▁obese", -13.278441429138184 ], [ "clerosis", -13.278489112854004 ], [ "▁Carnegie", -13.278489112854004 ], [ "▁Incredible", -13.278489112854004 ], [ "▁Teppich", -13.278489112854004 ], [ "▁crunchy", -13.278489112854004 ], [ "▁vomiting", -13.278529167175293 ], [ "▁sourire", -13.278619766235352 ], [ "publish", -13.278948783874512 ], [ "▁exterioar", -13.279094696044922 ], [ "▁forehead", -13.279107093811035 ], [ "▁climatique", -13.279313087463379 ], [ "▁conservator", -13.279458999633789 ], [ "▁Russland", -13.279687881469727 ], [ "▁kombiniert", -13.279687881469727 ], [ "▁Thrones", -13.279688835144043 ], [ "▁Griffith", -13.27968978881836 ], [ "▁fragrant", -13.279695510864258 ], [ "▁RSVP", -13.279698371887207 ], [ "klima", -13.279751777648926 ], [ "▁situație", -13.279808044433594 ], [ "deschiderea", -13.280009269714355 ], [ "▁moale", -13.280033111572266 ], [ "▁Trevor", -13.280112266540527 ], [ "ménager", -13.28011417388916 ], [ "deploying", -13.280428886413574 ], [ "▁Loft", -13.280500411987305 ], [ "▁Willkommen", -13.28059196472168 ], [ "▁Bezirks", -13.280887603759766 ], [ "▁Himself", -13.280975341796875 ], [ 
"▁quarant", -13.28101634979248 ], [ "▁1901", -13.281079292297363 ], [ "▁tripod", -13.28136920928955 ], [ "▁récolt", -13.281553268432617 ], [ "natură", -13.281631469726562 ], [ "School", -13.281649589538574 ], [ "contested", -13.281773567199707 ], [ "bwohl", -13.281784057617188 ], [ "Darren", -13.281830787658691 ], [ "medicine", -13.281903266906738 ], [ "▁Impuls", -13.282041549682617 ], [ "prevailing", -13.282057762145996 ], [ "▁orthodontic", -13.282089233398438 ], [ "▁sequential", -13.282089233398438 ], [ "▁Kolkata", -13.28209114074707 ], [ "▁séch", -13.282100677490234 ], [ "▁diaper", -13.28212833404541 ], [ "▁simplifie", -13.282144546508789 ], [ "▁reflux", -13.282163619995117 ], [ "▁Hypo", -13.282242774963379 ], [ "imprimer", -13.282251358032227 ], [ "▁Folosi", -13.282401084899902 ], [ "Info", -13.282570838928223 ], [ "▁Investiga", -13.282801628112793 ], [ "stabilirea", -13.282845497131348 ], [ "élis", -13.283149719238281 ], [ "ccessed", -13.28320026397705 ], [ "▁recyclable", -13.283293724060059 ], [ "▁forbidden", -13.283295631408691 ], [ "▁Colonel", -13.283297538757324 ], [ "▁nisip", -13.28330135345459 ], [ "▁Fundamental", -13.283303260803223 ], [ "▁nouveauté", -13.283308029174805 ], [ "khi", -13.283357620239258 ], [ "▁ecology", -13.28339672088623 ], [ "▁filament", -13.283540725708008 ], [ "▁relentless", -13.283559799194336 ], [ "▁Behavior", -13.283669471740723 ], [ "titulaire", -13.283900260925293 ], [ "▁administrativ", -13.28404426574707 ], [ "▁Vorlage", -13.284209251403809 ], [ "zeigte", -13.28427791595459 ], [ "▁Bäume", -13.284497261047363 ], [ "▁Kartoffel", -13.284497261047363 ], [ "▁Possible", -13.284500122070312 ], [ "▁perturb", -13.28466510772705 ], [ "▁Grigor", -13.284717559814453 ], [ "▁streng", -13.284759521484375 ], [ "▁vânzare", -13.285101890563965 ], [ "concentrating", -13.285698890686035 ], [ "▁rechtzeitig", -13.2857027053833 ], [ "▁eternity", -13.28570556640625 ], [ "▁Puzzle", -13.28575611114502 ], [ "▁malade", -13.285775184631348 ], [ "▁Metallic", -13.285776138305664 ], [ "▁Unterhaltung", -13.285783767700195 ], [ "▁4:00", -13.285820960998535 ], [ "▁magique", -13.285908699035645 ], [ "▁cellphone", -13.285975456237793 ], [ "▁inhibition", -13.286023139953613 ], [ "▁remplacement", -13.286025047302246 ], [ "▁WWII", -13.286089897155762 ], [ "Eff", -13.286258697509766 ], [ "kontakt", -13.286832809448242 ], [ "Update", -13.286869049072266 ], [ "▁Emerald", -13.286910057067871 ], [ "▁hammock", -13.286910057067871 ], [ "POWER", -13.286917686462402 ], [ "automne", -13.286917686462402 ], [ "▁(2004)", -13.286961555480957 ], [ "▁participanți", -13.287012100219727 ], [ "1998)", -13.287014961242676 ], [ "▁deletion", -13.287186622619629 ], [ "▁Proiect", -13.287226676940918 ], [ "IDENT", -13.287504196166992 ], [ "▁precis", -13.287623405456543 ], [ "▁limp", -13.287676811218262 ], [ "▁Pompe", -13.287686347961426 ], [ "▁ménage", -13.28780746459961 ], [ "▁Wahrheit", -13.288119316101074 ], [ "▁Intelligent", -13.28812026977539 ], [ "▁instability", -13.2881441116333 ], [ "insurance", -13.288346290588379 ], [ "▁Nursery", -13.288352966308594 ], [ "▁synonym", -13.288427352905273 ], [ "▁ignite", -13.28848934173584 ], [ "▁Vernon", -13.28849983215332 ], [ "purchase", -13.288524627685547 ], [ "▁disponibilité", -13.288662910461426 ], [ "▁producţi", -13.28909969329834 ], [ "▁Pentagon", -13.289329528808594 ], [ "▁illumination", -13.289329528808594 ], [ "▁obsolete", -13.289329528808594 ], [ "▁unacceptable", -13.28933048248291 ], [ "Gleichzeitig", -13.289938926696777 ], [ "rutsch", -13.290071487426758 ], [ 
"viziuni", -13.290409088134766 ], [ "▁Nicaragua", -13.29054069519043 ], [ "▁hesitation", -13.290541648864746 ], [ "▁nascut", -13.290545463562012 ], [ "▁Warehouse", -13.29055404663086 ], [ "geboten", -13.290558815002441 ], [ "▁Lagos", -13.290844917297363 ], [ "produced", -13.290874481201172 ], [ "cativa", -13.291309356689453 ], [ "▁Tracy", -13.291326522827148 ], [ "Projekt", -13.291468620300293 ], [ "▁malaria", -13.291692733764648 ], [ "▁Baldwin", -13.291755676269531 ], [ "Take", -13.291791915893555 ], [ "▁fluctuations", -13.291844367980957 ], [ "▁titular", -13.29194450378418 ], [ "bmw", -13.291976928710938 ], [ "▁brevet", -13.29202651977539 ], [ "étapes", -13.292173385620117 ], [ "wikipedia", -13.292373657226562 ], [ "▁corporal", -13.292424201965332 ], [ "▁Schönheit", -13.2926664352417 ], [ "utilizatorii", -13.292695999145508 ], [ "INFO", -13.292807579040527 ], [ "▁formularul", -13.292900085449219 ], [ "femi", -13.292959213256836 ], [ "Konferenz", -13.29296875 ], [ "▁carnival", -13.29296875 ], [ "▁Kräuter", -13.292969703674316 ], [ "▁gelernt", -13.292981147766113 ], [ "▁Sherman", -13.293017387390137 ], [ "▁persistence", -13.293289184570312 ], [ "▁Behörden", -13.293577194213867 ], [ "▁Frühjahr", -13.293578147888184 ], [ "▁Guvern", -13.293649673461914 ], [ "interpreting", -13.293878555297852 ], [ "▁nommé", -13.294021606445312 ], [ "consult", -13.294035911560059 ], [ "▁obligaţi", -13.294184684753418 ], [ "▁Newspaper", -13.2942476272583 ], [ "(2005)", -13.294515609741211 ], [ "pumped", -13.294614791870117 ], [ "▁autoritati", -13.294634819030762 ], [ "▁aplicatii", -13.294644355773926 ], [ "▁verhindert", -13.294794082641602 ], [ "▁évident", -13.294794082641602 ], [ "▁getrennt", -13.294795036315918 ], [ "▁Encourage", -13.295403480529785 ], [ "▁lurk", -13.295432090759277 ], [ "▁condemned", -13.295455932617188 ], [ "▁4:30", -13.295502662658691 ], [ "labelled", -13.29576587677002 ], [ "ordinea", -13.295899391174316 ], [ "▁pantofi", -13.296012878417969 ], [ "Default", -13.296042442321777 ], [ "▁beruh", -13.296120643615723 ], [ "/01/", -13.296268463134766 ], [ "league", -13.296503067016602 ], [ "▁couvert", -13.296524047851562 ], [ "▁competencies", -13.296622276306152 ], [ "▁mozzarella", -13.296622276306152 ], [ "jihad", -13.29662799835205 ], [ "▁gossip", -13.29662799835205 ], [ "▁Omaha", -13.296628952026367 ], [ "▁coincidence", -13.296669960021973 ], [ "▁Pinot", -13.296710968017578 ], [ "dotted", -13.296789169311523 ], [ "schilder", -13.297197341918945 ], [ "▁Munte", -13.297224998474121 ], [ "▁Vermieter", -13.297232627868652 ], [ "▁britannique", -13.297232627868652 ], [ "▁comentariu", -13.297235488891602 ], [ "abonnement", -13.29725456237793 ], [ "▁inventive", -13.29727840423584 ], [ "complie", -13.297279357910156 ], [ "composée", -13.29734992980957 ], [ "▁glatt", -13.297684669494629 ], [ "adorned", -13.297842979431152 ], [ "▁Opportunities", -13.297842979431152 ], [ "▁equilibrium", -13.297842979431152 ], [ "▁persuasive", -13.297842979431152 ], [ "▁achiziţi", -13.297843933105469 ], [ "▁déterminer", -13.297843933105469 ], [ "▁fleece", -13.297857284545898 ], [ "▁ivory", -13.29786205291748 ], [ "▁Genuss", -13.297900199890137 ], [ "Thousands", -13.297930717468262 ], [ "▁izolat", -13.297965049743652 ], [ "▁symbolize", -13.298033714294434 ], [ "gâteau", -13.298051834106445 ], [ "▁relații", -13.298062324523926 ], [ "▁Classroom", -13.298144340515137 ], [ "settlers", -13.298155784606934 ], [ "▁vremuri", -13.298195838928223 ], [ "▁Serial", -13.29838752746582 ], [ "▁boite", -13.298399925231934 ], [ "équivalent", 
-13.298453330993652 ], [ "▁benutzen", -13.298454284667969 ], [ "▁Recomand", -13.298462867736816 ], [ "▁Sinai", -13.298968315124512 ], [ "▁Advertise", -13.29906940460205 ], [ "▁Thermal", -13.299206733703613 ], [ "fiance", -13.299471855163574 ], [ "▁universitaire", -13.299683570861816 ], [ "▁rivière", -13.299793243408203 ], [ "▁reimburse", -13.299907684326172 ], [ "ţara", -13.299932479858398 ], [ "tician", -13.30002498626709 ], [ "intelligence", -13.300041198730469 ], [ "▁abgestimmt", -13.300288200378418 ], [ "▁compliqué", -13.300288200378418 ], [ "▁succulent", -13.300297737121582 ], [ "opéra", -13.300395011901855 ], [ "7-9", -13.300456047058105 ], [ "▁pierderi", -13.300654411315918 ], [ "extinction", -13.30090045928955 ], [ "▁Zweifel", -13.30103874206543 ], [ "ATCH", -13.30112361907959 ], [ "10,000", -13.301222801208496 ], [ "▁uninterrupted", -13.301513671875 ], [ "▁Eigentum", -13.301517486572266 ], [ "▁Utility", -13.301517486572266 ], [ "ско", -13.301529884338379 ], [ "▁tornado", -13.301544189453125 ], [ "▁Güte", -13.301727294921875 ], [ "▁pertain", -13.301923751831055 ], [ "painters", -13.301993370056152 ], [ "Help", -13.3021240234375 ], [ "▁străinătate", -13.30212688446045 ], [ "▁stammen", -13.302170753479004 ], [ "opposition", -13.302229881286621 ], [ "▁rhino", -13.302233695983887 ], [ "intervenir", -13.302427291870117 ], [ "▁hyperlink", -13.302441596984863 ], [ "höchst", -13.302518844604492 ], [ "roach", -13.302627563476562 ], [ "wSt", -13.302687644958496 ], [ "▁monastery", -13.302740097045898 ], [ "▁algae", -13.302754402160645 ], [ "▁shaving", -13.302757263183594 ], [ "présentent", -13.302804946899414 ], [ "Africa", -13.302860260009766 ], [ "eigener", -13.303047180175781 ], [ "▁glace", -13.303153991699219 ], [ "▁discurs", -13.303179740905762 ], [ "▁autograph", -13.303204536437988 ], [ "▁Conflict", -13.303359031677246 ], [ "▁școli", -13.303411483764648 ], [ "▁excerpt", -13.303617477416992 ], [ "correlated", -13.303628921508789 ], [ "empel", -13.303841590881348 ], [ "cryptocurrencies", -13.30396842956543 ], [ "▁symposium", -13.30396842956543 ], [ "▁gewohnt", -13.303994178771973 ], [ "PTSD", -13.304070472717285 ], [ "▁harmonic", -13.304166793823242 ], [ "discarded", -13.304282188415527 ], [ "▁Flint", -13.304359436035156 ], [ "Russia", -13.304422378540039 ], [ "▁ședinț", -13.304583549499512 ], [ "▁accusations", -13.304727554321289 ], [ "▁încălc", -13.304827690124512 ], [ "sendung", -13.305152893066406 ], [ "▁Chiropractic", -13.305197715759277 ], [ "▁excepți", -13.305201530456543 ], [ "▁proclaim", -13.305201530456543 ], [ "▁Flexible", -13.305295944213867 ], [ "▁Hüt", -13.30538272857666 ], [ "▁Baltic", -13.30539608001709 ], [ "▁inaltime", -13.30553913116455 ], [ "▁montré", -13.305868148803711 ], [ "exécution", -13.305898666381836 ], [ "partei", -13.305961608886719 ], [ "▁specifie", -13.306072235107422 ], [ "▁Jackpot", -13.306105613708496 ], [ "▁stumble", -13.306134223937988 ], [ "▁individuel", -13.306161880493164 ], [ "▁Veteran", -13.306217193603516 ], [ "▁Supplies", -13.306428909301758 ], [ "▁excavation", -13.306428909301758 ], [ "▁Libraries", -13.306469917297363 ], [ "▁prénom", -13.306476593017578 ], [ "WOOD", -13.30650806427002 ], [ "meciul", -13.306917190551758 ], [ "Chef", -13.306938171386719 ], [ "▁SUPER", -13.306940078735352 ], [ "Appeals", -13.30696964263916 ], [ "terapia", -13.307113647460938 ], [ "▁relatii", -13.30713939666748 ], [ "modifying", -13.30748462677002 ], [ "▁Regulament", -13.307662010192871 ], [ "▁bănci", -13.307662963867188 ], [ "▁agility", -13.307666778564453 ], [ 
"▁Magnetic", -13.307674407958984 ], [ "▁piatra", -13.30767822265625 ], [ "▁Governance", -13.307680130004883 ], [ "▁clown", -13.30772876739502 ], [ "▁Choir", -13.308337211608887 ], [ "aujourd", -13.308548927307129 ], [ "▁vendeur", -13.308732032775879 ], [ "ndererseits", -13.308859825134277 ], [ "▁Bahrain", -13.3088960647583 ], [ "▁Timisoara", -13.3088960647583 ], [ "▁exklusive", -13.3088960647583 ], [ "▁Population", -13.309001922607422 ], [ "▁nepo", -13.309073448181152 ], [ "▁relish", -13.309085845947266 ], [ "▁Pumpkin", -13.309571266174316 ], [ "▁détente", -13.309784889221191 ], [ "▁episcop", -13.309860229492188 ], [ "patterned", -13.309929847717285 ], [ "▁THANK", -13.310132026672363 ], [ "▁Widerspruch", -13.310132026672363 ], [ "▁Crisis", -13.310189247131348 ], [ "▁goose", -13.310226440429688 ], [ "▁couture", -13.310307502746582 ], [ "▁hinweg", -13.310446739196777 ], [ "supplemental", -13.310486793518066 ], [ "shingles", -13.31060791015625 ], [ "investir", -13.310635566711426 ], [ "▁steriliz", -13.310759544372559 ], [ "tractors", -13.310761451721191 ], [ "cellules", -13.31078815460205 ], [ "▁Gloria", -13.310888290405273 ], [ "▁teilnehmen", -13.311092376708984 ], [ "companiile", -13.311248779296875 ], [ "surfacing", -13.311279296875 ], [ "▁nostalgic", -13.311368942260742 ], [ "▁Badezimmer", -13.311369895935059 ], [ "▁conjoint", -13.311370849609375 ], [ "vacancy", -13.31145191192627 ], [ "▁homeland", -13.311582565307617 ], [ "▁Abschnitt", -13.311625480651855 ], [ "Cartea", -13.311653137207031 ], [ "SIA", -13.311782836914062 ], [ "▁explode", -13.311786651611328 ], [ "fostering", -13.311959266662598 ], [ "▁ceilalti", -13.31198787689209 ], [ "▁gentil", -13.31214714050293 ], [ "oplasty", -13.31218433380127 ], [ "bodied", -13.312424659729004 ], [ "▁1906", -13.312499046325684 ], [ "▁BlackBerry", -13.312607765197754 ], [ "▁Presbyterian", -13.312607765197754 ], [ "▁berücksichtigt", -13.312607765197754 ], [ "▁compartiment", -13.312607765197754 ], [ "▁compulsory", -13.312607765197754 ], [ "Millennial", -13.312609672546387 ], [ "▁sanitar", -13.312638282775879 ], [ "▁stink", -13.312975883483887 ], [ "lius", -13.313047409057617 ], [ "thankfully", -13.313136100769043 ], [ "modalité", -13.313173294067383 ], [ "▁cunoaște", -13.313226699829102 ], [ "Infrastruktur", -13.313227653503418 ], [ "▁studenți", -13.313253402709961 ], [ "Bref", -13.313270568847656 ], [ "London", -13.31360149383545 ], [ "▁Arduino", -13.313847541809082 ], [ "▁cilantro", -13.313847541809082 ], [ "▁Rafael", -13.313848495483398 ], [ "▁untersucht", -13.313861846923828 ], [ "▁martyr", -13.31389331817627 ], [ "▁Mormon", -13.313984870910645 ], [ "▁wicket", -13.313996315002441 ], [ "cherished", -13.314335823059082 ], [ "liquid", -13.314417839050293 ], [ "▁dorinț", -13.314571380615234 ], [ "lehnt", -13.314717292785645 ], [ "meisterschaft", -13.31493091583252 ], [ "fondateur", -13.314971923828125 ], [ "câble", -13.315078735351562 ], [ "▁erreichbar", -13.315091133117676 ], [ "▁footsteps", -13.315094947814941 ], [ "▁Kloster", -13.31519889831543 ], [ "▁multiplayer", -13.315218925476074 ], [ "▁substitu", -13.315276145935059 ], [ "▁Frisch", -13.315526962280273 ], [ "▁arsenal", -13.315712928771973 ], [ "explication", -13.315866470336914 ], [ "▁conexiun", -13.315986633300781 ], [ "muddy", -13.316045761108398 ], [ "▁Reifen", -13.316120147705078 ], [ "auraient", -13.316132545471191 ], [ "▁biologic", -13.316136360168457 ], [ "▁acquainted", -13.316332817077637 ], [ "▁shelving", -13.316341400146484 ], [ "Stunning", -13.316373825073242 ], [ "▁Clothing", 
-13.316394805908203 ], [ "▁kidding", -13.316431999206543 ], [ "excellent", -13.316452026367188 ], [ "▁susțin", -13.316487312316895 ], [ "bătut", -13.316502571105957 ], [ "elusive", -13.3165283203125 ], [ "werbung", -13.316743850708008 ], [ "slipping", -13.316813468933105 ], [ "▁configura", -13.316926956176758 ], [ "▁proaspat", -13.31695556640625 ], [ "▁apporté", -13.317120552062988 ], [ "▁démarr", -13.317328453063965 ], [ "Spezialist", -13.317578315734863 ], [ "▁obligați", -13.317578315734863 ], [ "▁societăți", -13.317578315734863 ], [ "▁malpractice", -13.31757926940918 ], [ "Hundreds", -13.317609786987305 ], [ "▁3:1", -13.318138122558594 ], [ "▁computation", -13.31817626953125 ], [ "▁Heilig", -13.318528175354004 ], [ "▁Helsinki", -13.318824768066406 ], [ "▁firefighters", -13.318824768066406 ], [ "▁obedience", -13.318824768066406 ], [ "▁evacuate", -13.318825721740723 ], [ "▁Floyd", -13.318840026855469 ], [ "▁Disneyland", -13.318859100341797 ], [ "Cathy", -13.319069862365723 ], [ "▁Broken", -13.319278717041016 ], [ "cript", -13.319952011108398 ], [ "▁Gewähr", -13.320073127746582 ], [ "▁embarrassed", -13.320073127746582 ], [ "▁Leicht", -13.32007884979248 ], [ "▁témoign", -13.320379257202148 ], [ "▁viteze", -13.3206148147583 ], [ "▁hallmark", -13.320731163024902 ], [ "uploads", -13.32082462310791 ], [ "▁Submission", -13.320929527282715 ], [ "▁croissant", -13.321049690246582 ], [ "awning", -13.32105827331543 ], [ "detecting", -13.321198463439941 ], [ "▁Bahamas", -13.321322441101074 ], [ "▁Kathleen", -13.321325302124023 ], [ "▁latch", -13.321377754211426 ], [ "▁pronounce", -13.321380615234375 ], [ "▁choke", -13.321428298950195 ], [ "▁$50,000", -13.3215970993042 ], [ "▁historische", -13.321642875671387 ], [ "jugé", -13.321829795837402 ], [ "▁MasterCard", -13.321949005126953 ], [ "▁Horror", -13.321955680847168 ], [ "spoiled", -13.321958541870117 ], [ "▁apariți", -13.32202434539795 ], [ "geschaltet", -13.3225736618042 ], [ "▁Londra", -13.322578430175781 ], [ "viction", -13.322580337524414 ], [ "▁Disaster", -13.322593688964844 ], [ "▁desigur", -13.322601318359375 ], [ "▁substanț", -13.322601318359375 ], [ "▁compiler", -13.322613716125488 ], [ "▁vanzari", -13.32262897491455 ], [ "▁Simulation", -13.322669982910156 ], [ "Occasionally", -13.322842597961426 ], [ "Seite", -13.322884559631348 ], [ "Linked", -13.322938919067383 ], [ "Roll", -13.323015213012695 ], [ "▁trajet", -13.323244094848633 ], [ "Molecular", -13.323834419250488 ], [ "▁pragmatic", -13.323843002319336 ], [ "judecată", -13.323915481567383 ], [ "ров", -13.32400894165039 ], [ "serrurerie", -13.324024200439453 ], [ "▁reconstruct", -13.324129104614258 ], [ "▁heureuse", -13.324179649353027 ], [ "▁knight", -13.32422924041748 ], [ "knowingly", -13.324431419372559 ], [ "▁perspectiva", -13.324453353881836 ], [ "ordinary", -13.324604034423828 ], [ "▁chaudière", -13.324721336364746 ], [ "Neill", -13.324727058410645 ], [ "cellulose", -13.325080871582031 ], [ "▁Delicious", -13.325080871582031 ], [ "▁incearca", -13.325080871582031 ], [ "▁retrospective", -13.325080871582031 ], [ "▁mundane", -13.325081825256348 ], [ "▁definiert", -13.32508659362793 ], [ "▁cockpit", -13.325088500976562 ], [ "Aktionen", -13.325363159179688 ], [ "▁distanț", -13.325654029846191 ], [ "▁diplôme", -13.325708389282227 ], [ "prepaid", -13.325737953186035 ], [ "▁Tabellen", -13.325758934020996 ], [ "▁economie", -13.325770378112793 ], [ "December", -13.325826644897461 ], [ "Punkten", -13.32613754272461 ], [ "▁Punch", -13.32614517211914 ], [ "Martin", -13.326154708862305 ], [ 
"▁Espresso", -13.326314926147461 ], [ "▁ubiquitous", -13.326335906982422 ], [ "▁Mongolia", -13.326337814331055 ], [ "▁collabor", -13.326635360717773 ], [ "▁Vordergrund", -13.32696533203125 ], [ "cameră", -13.327091217041016 ], [ "represented", -13.327268600463867 ], [ "▁AUTO", -13.327446937561035 ], [ "▁Ofert", -13.327542304992676 ], [ "neig", -13.327593803405762 ], [ "▁Hazard", -13.327595710754395 ], [ "▁Constanta", -13.327596664428711 ], [ "▁tumour", -13.32759952545166 ], [ "▁Neighborhood", -13.327603340148926 ], [ "▁detaliat", -13.327619552612305 ], [ "▁extraordinaire", -13.327665328979492 ], [ "▁Therapeutic", -13.327686309814453 ], [ "predicting", -13.327693939208984 ], [ "▁institutii", -13.32776165008545 ], [ "ifizierung", -13.327797889709473 ], [ "wählt", -13.328207015991211 ], [ "▁remarquable", -13.32822322845459 ], [ "Invent", -13.328512191772461 ], [ "▁foloseșt", -13.328514099121094 ], [ "öfte", -13.328703880310059 ], [ "▁discreet", -13.328853607177734 ], [ "▁Flickr", -13.32885456085205 ], [ "▁trésor", -13.328856468200684 ], [ "▁steroids", -13.328872680664062 ], [ "▁personnalité", -13.328953742980957 ], [ "▁Krankenhaus", -13.32901668548584 ], [ "▁affordability", -13.329218864440918 ], [ "deuten", -13.329398155212402 ], [ "Detailed", -13.329412460327148 ], [ "Walk", -13.329444885253906 ], [ "▁parallèle", -13.329483032226562 ], [ "thèse", -13.329649925231934 ], [ "▁gefördert", -13.330117225646973 ], [ "Greeting", -13.33014965057373 ], [ "gelistet", -13.330172538757324 ], [ "▁chlorine", -13.330392837524414 ], [ "behält", -13.33039665222168 ], [ "emption", -13.330435752868652 ], [ "▁mobilité", -13.330601692199707 ], [ "▁randonnée", -13.330668449401855 ], [ "habitant", -13.330718040466309 ], [ "zilla", -13.331082344055176 ], [ "▁Lili", -13.331160545349121 ], [ "▁répét", -13.331341743469238 ], [ "trucât", -13.331376075744629 ], [ "▁Hospice", -13.331376075744629 ], [ "▁grassroots", -13.331377029418945 ], [ "▁affiché", -13.331393241882324 ], [ "pears", -13.331470489501953 ], [ "▁linistit", -13.331497192382812 ], [ "▁Patron", -13.331552505493164 ], [ "▁Stalin", -13.331626892089844 ], [ "▁închiri", -13.331751823425293 ], [ "▁Apostol", -13.332018852233887 ], [ "▁poudre", -13.332246780395508 ], [ "▁piscin", -13.332419395446777 ], [ "merlin", -13.33259391784668 ], [ "limited", -13.33260726928711 ], [ "▁métallique", -13.332639694213867 ], [ "gazebo", -13.33267879486084 ], [ "weilige", -13.332718849182129 ], [ "prosecutors", -13.33278751373291 ], [ "Expert", -13.33314323425293 ], [ "Assemblée", -13.333271980285645 ], [ "▁fauna", -13.333285331726074 ], [ "▁Turtle", -13.333353996276855 ], [ "▁Consortium", -13.333905220031738 ], [ "▁assemblies", -13.333905220031738 ], [ "▁trajectory", -13.333905220031738 ], [ "▁Vineyard", -13.333906173706055 ], [ "▁Mehrwert", -13.334037780761719 ], [ "▁sunflower", -13.334043502807617 ], [ "develop", -13.334060668945312 ], [ "▁heroic", -13.334100723266602 ], [ "▁riscuri", -13.334151268005371 ], [ "oeuf", -13.334300994873047 ], [ "influence", -13.334452629089355 ], [ "▁Voraussetzung", -13.334500312805176 ], [ "utoritatea", -13.334518432617188 ], [ "Produsul", -13.334654808044434 ], [ "▁gewährleistet", -13.335171699523926 ], [ "▁brûl", -13.335175514221191 ], [ "▁Column", -13.335184097290039 ], [ "▁trousers", -13.335209846496582 ], [ "▁posterior", -13.33521556854248 ], [ "glyph", -13.335251808166504 ], [ "▁Happen", -13.335280418395996 ], [ "▁créateur", -13.335667610168457 ], [ "▁apostle", -13.335898399353027 ], [ "▁padding", -13.335907936096191 ], [ "▁Digitalisierung", 
-13.335908889770508 ], [ "▁Laurie", -13.335915565490723 ], [ "▁Erwerb", -13.336065292358398 ], [ "▁bătrân", -13.336440086364746 ], [ "▁harmonious", -13.336441040039062 ], [ "▁ailments", -13.336456298828125 ], [ "▁Venue", -13.33650016784668 ], [ "▁Motorcycle", -13.336523056030273 ], [ "▁cortex", -13.336551666259766 ], [ "▁Sunrise", -13.336636543273926 ], [ "Software", -13.336775779724121 ], [ "▁advocat", -13.336934089660645 ], [ "essentiellement", -13.337422370910645 ], [ "•", -13.337494850158691 ], [ "părut", -13.337522506713867 ], [ "▁Suffolk", -13.337711334228516 ], [ "▁righteousness", -13.337711334228516 ], [ "▁Shirley", -13.337712287902832 ], [ "▁Famous", -13.337749481201172 ], [ "▁emulate", -13.337788581848145 ], [ "vermögen", -13.33788776397705 ], [ "generated", -13.337963104248047 ], [ "Ecole", -13.337977409362793 ], [ "▁managerial", -13.338086128234863 ], [ "believe", -13.338091850280762 ], [ "▁récupére", -13.338348388671875 ], [ "▁recens", -13.338531494140625 ], [ "▁Barrett", -13.338778495788574 ], [ "▁courageous", -13.338814735412598 ], [ "9.95", -13.338961601257324 ], [ "▁Odyssey", -13.338982582092285 ], [ "▁Violence", -13.338982582092285 ], [ "▁concasseur", -13.338982582092285 ], [ "▁evacuation", -13.338982582092285 ], [ "▁kontinuierlich", -13.338982582092285 ], [ "▁epidemi", -13.3389892578125 ], [ "▁disconnected", -13.339197158813477 ], [ "frucht", -13.339339256286621 ], [ "Trustees", -13.339348793029785 ], [ "▁Massiv", -13.339459419250488 ], [ "gebucht", -13.339473724365234 ], [ "stütze", -13.339526176452637 ], [ "▁febr", -13.339741706848145 ], [ "honoured", -13.339743614196777 ], [ "▁digitiz", -13.340079307556152 ], [ "Image", -13.34021282196045 ], [ "▁Brunswick", -13.34025764465332 ], [ "▁Therapist", -13.34026050567627 ], [ "accessoire", -13.340264320373535 ], [ "▁croqu", -13.340291023254395 ], [ "Pflanz", -13.34052848815918 ], [ "dragging", -13.340536117553711 ], [ "▁Facilit", -13.340750694274902 ], [ "soucis", -13.340765953063965 ], [ "Asadar", -13.34081745147705 ], [ "▁Thames", -13.341021537780762 ], [ "▁cariera", -13.341116905212402 ], [ "▁mercury", -13.341530799865723 ], [ "▁Blessed", -13.341533660888672 ], [ "▁Whitney", -13.341630935668945 ], [ "▁géant", -13.341926574707031 ], [ "▁coordonnée", -13.342217445373535 ], [ "oidal", -13.342623710632324 ], [ "Wohnungen", -13.342696189880371 ], [ "▁Spectrum", -13.34280776977539 ], [ "▁Avengers", -13.342808723449707 ], [ "▁Gloucester", -13.342808723449707 ], [ "▁nützlich", -13.342811584472656 ], [ "▁toothbrush", -13.342830657958984 ], [ "▁Vanessa", -13.342843055725098 ], [ "Saxon", -13.342947959899902 ], [ "▁comunități", -13.343165397644043 ], [ "reprezentanţi", -13.343175888061523 ], [ "▁întâlnire", -13.343225479125977 ], [ "delve", -13.343234062194824 ], [ "▁technologique", -13.343452453613281 ], [ "Describe", -13.343466758728027 ], [ "▁constient", -13.343501091003418 ], [ "gestalt", -13.343600273132324 ], [ "▁Tribune", -13.344090461730957 ], [ "▁fiberglass", -13.34412956237793 ], [ "verbindung", -13.344210624694824 ], [ "sacrificing", -13.344351768493652 ], [ "▁Pablo", -13.344470024108887 ], [ "▁adanc", -13.34525203704834 ], [ "omia", -13.345309257507324 ], [ "hâte", -13.345317840576172 ], [ "▁Sanctuary", -13.345366477966309 ], [ "▁accolade", -13.345368385314941 ], [ "▁Wurzel", -13.345398902893066 ], [ "▁spacing", -13.345433235168457 ], [ "▁bedeutend", -13.345481872558594 ], [ "▁biased", -13.345499992370605 ], [ "randomized", -13.345747947692871 ], [ "▁agenți", -13.345856666564941 ], [ "▁excepţi", -13.346012115478516 ], [ 
"▁fișier", -13.346028327941895 ], [ "▁fisier", -13.34664535522461 ], [ "irrespective", -13.346648216247559 ], [ "▁Gardner", -13.34665584564209 ], [ "▁aprecia", -13.346884727478027 ], [ "▁Klu", -13.347082138061523 ], [ "▁apropie", -13.347535133361816 ], [ "▁echival", -13.347784042358398 ], [ "tauchen", -13.347862243652344 ], [ "▁hauptsächlich", -13.347930908203125 ], [ "▁pollutants", -13.347930908203125 ], [ "▁mammals", -13.347931861877441 ], [ "▁Landwirtschaft", -13.347936630249023 ], [ "▁stăpân", -13.34793758392334 ], [ "▁Prüf", -13.347990989685059 ], [ "▁Motorsport", -13.34807300567627 ], [ "Leaving", -13.348352432250977 ], [ "schädigung", -13.348573684692383 ], [ "▁calendrier", -13.348573684692383 ], [ "plikation", -13.348655700683594 ], [ "▁DOE", -13.348655700683594 ], [ "ред", -13.348966598510742 ], [ "Jahr", -13.34913444519043 ], [ "▁entitlement", -13.34921646118164 ], [ "schuldig", -13.349217414855957 ], [ "▁Münster", -13.349218368530273 ], [ "pository", -13.349451065063477 ], [ "▁numero", -13.350220680236816 ], [ "▁entsprechen", -13.350383758544922 ], [ "▁astronaut", -13.350502967834473 ], [ "▁hexagon", -13.350502967834473 ], [ "▁DAMAGE", -13.350503921508789 ], [ "▁Quartz", -13.350504875183105 ], [ "▁rédaction", -13.350504875183105 ], [ "▁replenish", -13.350508689880371 ], [ "▁amoureux", -13.350523948669434 ], [ "▁opțiun", -13.350616455078125 ], [ "Custom", -13.350622177124023 ], [ "▁Telekom", -13.350639343261719 ], [ "▁RFID", -13.351163864135742 ], [ "▁Scorpio", -13.351264953613281 ], [ "▁thirst", -13.35152816772461 ], [ "▁Kosovo", -13.351791381835938 ], [ "▁precursor", -13.351794242858887 ], [ "▁sarbatori", -13.351810455322266 ], [ "▁Daisy", -13.351828575134277 ], [ "▁Dropbox", -13.351898193359375 ], [ "Smith", -13.351949691772461 ], [ "contabil", -13.352191925048828 ], [ "▁monnaie", -13.352437973022461 ], [ "capsul", -13.352577209472656 ], [ "treff", -13.352760314941406 ], [ "beauftragte", -13.352761268615723 ], [ "industrial", -13.353006362915039 ], [ "responsables", -13.353010177612305 ], [ "▁FIRST", -13.353080749511719 ], [ "▁crezut", -13.35308837890625 ], [ "▁reseller", -13.353107452392578 ], [ "▁direcți", -13.353154182434082 ], [ "mouvoir", -13.353294372558594 ], [ "▁Invite", -13.353431701660156 ], [ "▁constructii", -13.353440284729004 ], [ "▁oublié", -13.353577613830566 ], [ "găseșt", -13.353687286376953 ], [ "▁végét", -13.353755950927734 ], [ "idine", -13.35385799407959 ], [ "▁Ajout", -13.353951454162598 ], [ "▁Shelf", -13.354195594787598 ], [ "HALL", -13.35422420501709 ], [ "▁nostalgia", -13.35437297821045 ], [ "▁ottoman", -13.35437297821045 ], [ "▁ambalaj", -13.354398727416992 ], [ "municipiul", -13.354405403137207 ], [ "NOVA", -13.354500770568848 ], [ "▁disregard", -13.354997634887695 ], [ "▁bijuterii", -13.355018615722656 ], [ "▁sorgfältig", -13.355018615722656 ], [ "vraient", -13.355307579040527 ], [ "▁backsplash", -13.355669975280762 ], [ "▁nuisance", -13.355679512023926 ], [ "▁Territory", -13.35568618774414 ], [ "▁surprins", -13.355693817138672 ], [ "enchanting", -13.35571002960205 ], [ "trospecti", -13.355847358703613 ], [ "▁dvd", -13.356199264526367 ], [ "Totally", -13.356329917907715 ], [ "▁Edelstahl", -13.35696029663086 ], [ "▁sequencing", -13.356961250305176 ], [ "▁Circus", -13.35696792602539 ], [ "▁ashamed", -13.35696792602539 ], [ "▁horrific", -13.357028007507324 ], [ "▁taiat", -13.357033729553223 ], [ "▁Angehörige", -13.357125282287598 ], [ "Michel", -13.357256889343262 ], [ "▁communion", -13.357298851013184 ], [ "▁psiho", -13.357378959655762 ], [ 
"losigkeit", -13.357405662536621 ], [ "dipping", -13.357512474060059 ], [ "▁profesională", -13.357608795166016 ], [ "Indiferent", -13.357609748840332 ], [ "▁crestin", -13.357723236083984 ], [ "wholesome", -13.357796669006348 ], [ "▁Welfare", -13.358257293701172 ], [ "▁plentiful", -13.358257293701172 ], [ "▁Triumph", -13.358258247375488 ], [ "▁fascination", -13.358260154724121 ], [ "▁vicious", -13.358291625976562 ], [ "▁Höchst", -13.358294486999512 ], [ "▁Dunkel", -13.358386039733887 ], [ "▁harass", -13.358406066894531 ], [ "ambogia", -13.358475685119629 ], [ "▁synonymous", -13.358598709106445 ], [ "bottom", -13.35879898071289 ], [ "▁bénévole", -13.358906745910645 ], [ "▁suprafaț", -13.358906745910645 ], [ "▁umplut", -13.358997344970703 ], [ "▁Teddy", -13.359162330627441 ], [ "breathable", -13.359292984008789 ], [ "▁Toshiba", -13.3595552444458 ], [ "▁seismic", -13.359569549560547 ], [ "▁dringend", -13.359583854675293 ], [ "▁cultură", -13.359585762023926 ], [ "▁Waffen", -13.359665870666504 ], [ "▁Bubble", -13.359702110290527 ], [ "▁Brigade", -13.359759330749512 ], [ "▁Blatt", -13.36012077331543 ], [ "▁scénario", -13.36020565032959 ], [ "allah", -13.360396385192871 ], [ "▁superintendent", -13.360855102539062 ], [ "pflanzen", -13.360856056213379 ], [ "▁kurzfristig", -13.360856056213379 ], [ "▁raspberry", -13.360876083374023 ], [ "▁Evident", -13.360904693603516 ], [ "▁inutile", -13.361076354980469 ], [ "prouvé", -13.361104011535645 ], [ "▁obtien", -13.36141300201416 ], [ "▁Matthias", -13.361506462097168 ], [ "▁déclench", -13.361506462097168 ], [ "Situationen", -13.361529350280762 ], [ "▁Disclaimer", -13.362156867980957 ], [ "▁loneliness", -13.362156867980957 ], [ "▁Gothic", -13.362164497375488 ], [ "▁humility", -13.362165451049805 ], [ "▁machiaj", -13.362175941467285 ], [ "▁Sophia", -13.362178802490234 ], [ "▁Forecast", -13.362265586853027 ], [ "IBLE", -13.362456321716309 ], [ "ivism", -13.362480163574219 ], [ "israel", -13.36278247833252 ], [ "▁kümmern", -13.362809181213379 ], [ "▁verbreitet", -13.362825393676758 ], [ "▁capacitor", -13.362832069396973 ], [ "deprived", -13.3634614944458 ], [ "unbiased", -13.3634614944458 ], [ "▁Dominique", -13.3634614944458 ], [ "▁Bamboo", -13.363462448120117 ], [ "▁Heinrich", -13.363465309143066 ], [ "individualized", -13.363550186157227 ], [ "▁ansprechen", -13.363776206970215 ], [ "ordinaire", -13.363801002502441 ], [ "▁Ucraina", -13.364112854003906 ], [ "▁militare", -13.364115715026855 ], [ "massif", -13.364352226257324 ], [ "▁emisiuni", -13.364501953125 ], [ "maladies", -13.364622116088867 ], [ "▁pneumonia", -13.364765167236328 ], [ "▁graffiti", -13.364767074584961 ], [ "▁Determine", -13.3648099899292 ], [ "▁Northwestern", -13.364893913269043 ], [ "▁grasimi", -13.364897727966309 ], [ "▁lebendig", -13.364920616149902 ], [ "▁cifre", -13.364946365356445 ], [ "▁accelerator", -13.36533260345459 ], [ "▁nib", -13.365374565124512 ], [ "▁Jocuri", -13.365400314331055 ], [ "▁außergewöhnlich", -13.365402221679688 ], [ "▁orchid", -13.36542797088623 ], [ "zugreifen", -13.365530967712402 ], [ "utilisent", -13.365662574768066 ], [ "▁nineteenth", -13.366071701049805 ], [ "improvisation", -13.366072654724121 ], [ "▁Disclosure", -13.366072654724121 ], [ "▁Überraschung", -13.366072654724121 ], [ "▁Casual", -13.366093635559082 ], [ "▁Witness", -13.366093635559082 ], [ "teacher", -13.366125106811523 ], [ "Printed", -13.366129875183105 ], [ "▁prețuri", -13.366189956665039 ], [ "rues", -13.366216659545898 ], [ "▁cerinte", -13.366338729858398 ], [ "rouvent", -13.36662483215332 ], [ 
"assembling", -13.36673355102539 ], [ "▁atenție", -13.366769790649414 ], [ "▁amintiri", -13.366782188415527 ], [ "▁sustinut", -13.366805076599121 ], [ "Digital", -13.367257118225098 ], [ "▁Deborah", -13.36738109588623 ], [ "gesichts", -13.367382049560547 ], [ "▁temperament", -13.367440223693848 ], [ "▁competency", -13.367447853088379 ], [ "▁dwarf", -13.367515563964844 ], [ "▁dureaz", -13.367539405822754 ], [ "habilit", -13.367764472961426 ], [ "leaned", -13.3679838180542 ], [ "▁illicit", -13.368348121643066 ], [ "Availability", -13.368691444396973 ], [ "▁Brașov", -13.368691444396973 ], [ "▁Pyramid", -13.368691444396973 ], [ "▁achievable", -13.368691444396973 ], [ "▁judiciaire", -13.368691444396973 ], [ "Übrigen", -13.368693351745605 ], [ "▁activism", -13.368795394897461 ], [ "▁boycott", -13.368839263916016 ], [ "Desigur", -13.368927001953125 ], [ "klingt", -13.369264602661133 ], [ "▁Leidenschaft", -13.369346618652344 ], [ "▁Richtig", -13.369701385498047 ], [ "▁Airbnb", -13.370002746582031 ], [ "▁învățământ", -13.370002746582031 ], [ "Kampagne", -13.370004653930664 ], [ "▁thumbnail", -13.370014190673828 ], [ "Bestimmungen", -13.370016098022461 ], [ "▁vollkommen", -13.37001895904541 ], [ "▁biomass", -13.370027542114258 ], [ "▁escalate", -13.370030403137207 ], [ "wächst", -13.370085716247559 ], [ "▁scăpa", -13.370098114013672 ], [ "▁résult", -13.37014389038086 ], [ "▁shrine", -13.370217323303223 ], [ "maximizing", -13.370370864868164 ], [ "avoue", -13.370492935180664 ], [ "dirigeants", -13.370665550231934 ], [ "▁cerveau", -13.370672225952148 ], [ "▁proast", -13.370955467224121 ], [ "▁contaminants", -13.371325492858887 ], [ "effectue", -13.37151050567627 ], [ "ediție", -13.371539115905762 ], [ "monetiz", -13.371772766113281 ], [ "▁deplasare", -13.371976852416992 ], [ "▁Sfant", -13.37209415435791 ], [ "ROOM", -13.372113227844238 ], [ "bushes", -13.372151374816895 ], [ "mairie", -13.37251091003418 ], [ "obligate", -13.372528076171875 ], [ "▁tug", -13.372573852539062 ], [ "▁Collector", -13.372632026672363 ], [ "▁annoyed", -13.372633934020996 ], [ "▁aerobic", -13.372654914855957 ], [ "▁integer", -13.372830390930176 ], [ "▁Upload", -13.373249053955078 ], [ "▁impartial", -13.37346076965332 ], [ "▁discuţi", -13.373623847961426 ], [ "gastrointestinal", -13.37394905090332 ], [ "▁chiropractor", -13.37394905090332 ], [ "▁treptat", -13.373950004577637 ], [ "▁fishermen", -13.37395191192627 ], [ "levitra", -13.3739595413208 ], [ "Gruppe", -13.373964309692383 ], [ "▁Apostle", -13.373970985412598 ], [ "▁conseillé", -13.374068260192871 ], [ "Isra", -13.37421703338623 ], [ "▁Persönlichkeit", -13.374431610107422 ], [ "▁cantitati", -13.374459266662598 ], [ "▁incredibil", -13.374614715576172 ], [ "▁Berater", -13.374800682067871 ], [ "▁propuneri", -13.374835014343262 ], [ "MEDIA", -13.375236511230469 ], [ "▁opaque", -13.37526798248291 ], [ "▁Nielsen", -13.375269889831543 ], [ "▁cartofi", -13.375277519226074 ], [ "▁Whale", -13.37533950805664 ], [ "erzeugen", -13.375890731811523 ], [ "▁knack", -13.375931739807129 ], [ "Kandidat", -13.375936508178711 ], [ "▁tradițional", -13.375937461853027 ], [ "zählige", -13.375983238220215 ], [ "▁Petroleum", -13.376588821411133 ], [ "▁deficiencies", -13.376588821411133 ], [ "▁persecution", -13.376588821411133 ], [ "▁zgomot", -13.376588821411133 ], [ "▁reiterate", -13.376592636108398 ], [ "▁Slice", -13.376670837402344 ], [ "▁envy", -13.376704216003418 ], [ "▁stomac", -13.376851081848145 ], [ "Donnell", -13.376914978027344 ], [ "▁primordial", -13.377249717712402 ], [ "reclining", 
-13.377274513244629 ], [ "PASS", -13.377861976623535 ], [ "▁Resistance", -13.377910614013672 ], [ "▁Widerruf", -13.377911567687988 ], [ "▁vodka", -13.377911567687988 ], [ "▁yolk", -13.377912521362305 ], [ "ollywood", -13.377915382385254 ], [ "▁truffle", -13.377933502197266 ], [ "▁Sänger", -13.377955436706543 ], [ "▁Kenntnis", -13.377968788146973 ], [ "▁Kiel", -13.37803840637207 ], [ "▁Mutual", -13.378044128417969 ], [ "▁saliva", -13.37816047668457 ], [ "▁renforce", -13.378411293029785 ], [ "▁mulch", -13.378680229187012 ], [ "▁reviste", -13.378875732421875 ], [ "lucrarea", -13.378978729248047 ], [ "▁multiply", -13.379130363464355 ], [ "▁marshmallow", -13.379234313964844 ], [ "▁Durchschnitt", -13.379288673400879 ], [ "▁Authorities", -13.379426002502441 ], [ "▁greed", -13.379521369934082 ], [ "Visiting", -13.379638671875 ], [ "Carlton", -13.379727363586426 ], [ "▁splend", -13.37975025177002 ], [ "▁Erkenntnisse", -13.379898071289062 ], [ "▁Russie", -13.379916191101074 ], [ "Agence", -13.38007926940918 ], [ "schickt", -13.380288124084473 ], [ " -13.3804931640625 ], [ "▁Erweiterung", -13.380560874938965 ], [ "▁Franchise", -13.380560874938965 ], [ "Dedicated", -13.380563735961914 ], [ "▁Wisdom", -13.380569458007812 ], [ "▁gagnant", -13.380592346191406 ], [ "planetary", -13.380598068237305 ], [ "▁affinity", -13.380619049072266 ], [ "▁préférence", -13.380739212036133 ], [ "▁intellect", -13.380810737609863 ], [ "▁Translat", -13.380830764770508 ], [ "▁Sultan", -13.38089370727539 ], [ "▁birouri", -13.38101577758789 ], [ "▁Academie", -13.381224632263184 ], [ "▁consequential", -13.38138484954834 ], [ "▁festgestellt", -13.381402015686035 ], [ "▁Chanel", -13.381444931030273 ], [ "▁soutenu", -13.381875038146973 ], [ "▁Montessori", -13.381888389587402 ], [ "▁equitable", -13.381892204284668 ], [ "▁théorie", -13.381893157958984 ], [ "▁primavara", -13.3818941116333 ], [ "▁Daughter", -13.38189697265625 ], [ "▁Dixon", -13.381898880004883 ], [ "▁unravel", -13.38190746307373 ], [ "Olimp", -13.381915092468262 ], [ "▁disturbed", -13.381916999816895 ], [ "▁novelty", -13.382004737854004 ], [ "synchronous", -13.382113456726074 ], [ "relevant", -13.382166862487793 ], [ "bourgeois", -13.38251781463623 ], [ "▁Parfum", -13.38255500793457 ], [ "▁Polonia", -13.382563591003418 ], [ "▁monoton", -13.382781028747559 ], [ "tratare", -13.38302230834961 ], [ "dumping", -13.38318157196045 ], [ "▁Bibliothek", -13.383217811584473 ], [ "▁Saskatchewan", -13.383217811584473 ], [ "▁experiential", -13.383217811584473 ], [ "▁verursacht", -13.383217811584473 ], [ "intègre", -13.383218765258789 ], [ "▁Intermediate", -13.383275032043457 ], [ "Israel", -13.383476257324219 ], [ "lucreaza", -13.383495330810547 ], [ "▁quantify", -13.383862495422363 ], [ "▁zahăr", -13.383882522583008 ], [ "▁încadr", -13.383902549743652 ], [ "Personalized", -13.383946418762207 ], [ "▁Chronic", -13.384309768676758 ], [ "hôpital", -13.384549140930176 ], [ "▁diskutiert", -13.384549140930176 ], [ "electrique", -13.3848876953125 ], [ "ethos", -13.384978294372559 ], [ "Nase", -13.385059356689453 ], [ "atmosphère", -13.385214805603027 ], [ "▁ungefähr", -13.385215759277344 ], [ "évaluer", -13.385251998901367 ], [ "▁scuz", -13.385321617126465 ], [ "haltige", -13.38533878326416 ], [ "January", -13.38557243347168 ], [ "▁Sharma", -13.385603904724121 ], [ "▁seizures", -13.385881423950195 ], [ "▁zucchini", -13.385881423950195 ], [ "▁Stadi", -13.385885238647461 ], [ "▁eccentric", -13.385885238647461 ], [ "▁offensichtlich", -13.385909080505371 ], [ "▁Irvine", -13.385920524597168 ], 
[ "cuprinse", -13.38601303100586 ], [ "▁Arbitr", -13.386157035827637 ], [ "Buenos", -13.386183738708496 ], [ "▁Shelter", -13.386210441589355 ], [ "CEPT", -13.386454582214355 ], [ "ouvri", -13.386455535888672 ], [ "acryl", -13.386539459228516 ], [ "▁Gourmet", -13.38654899597168 ], [ "scented", -13.386595726013184 ], [ "doubling", -13.38659954071045 ], [ "▁rafina", -13.386608123779297 ], [ "▁Vereinbarung", -13.38721752166748 ], [ "▁Dashboard", -13.387218475341797 ], [ "▁Sandwich", -13.387218475341797 ], [ "▁Riviera", -13.387226104736328 ], [ "échec", -13.387237548828125 ], [ "Giro", -13.387253761291504 ], [ "▁oasis", -13.38725757598877 ], [ "▁apology", -13.3872709274292 ], [ "▁YEAR", -13.387272834777832 ], [ "▁realtor", -13.387504577636719 ], [ "acheteur", -13.38754653930664 ], [ "▁larva", -13.387613296508789 ], [ "▁invitați", -13.388097763061523 ], [ "exhibiting", -13.38830852508545 ], [ "modernen", -13.388331413269043 ], [ "▁Collaboration", -13.38855266571045 ], [ "▁dezvălui", -13.38855266571045 ], [ "▁kiosk", -13.38855266571045 ], [ "▁Bermuda", -13.388553619384766 ], [ "Copiii", -13.388564109802246 ], [ "▁goddess", -13.388581275939941 ], [ "uplifting", -13.388609886169434 ], [ "▁simultan", -13.388808250427246 ], [ "▁episod", -13.388884544372559 ], [ "▁Braşov", -13.38922119140625 ], [ "cunoscută", -13.389634132385254 ], [ "▁Cherokee", -13.389890670776367 ], [ "▁Kazakhstan", -13.389890670776367 ], [ "▁Lauderdale", -13.389890670776367 ], [ "▁închisoare", -13.389898300170898 ], [ "▁Christchurch", -13.389934539794922 ], [ "▁influenţ", -13.389982223510742 ], [ "▁Meghan", -13.390019416809082 ], [ "▁Dienstleistung", -13.390557289123535 ], [ "▁cladiri", -13.390564918518066 ], [ "▁evrei", -13.391148567199707 ], [ "▁oatmeal", -13.391230583190918 ], [ "▁chronique", -13.3912353515625 ], [ "▁associée", -13.391264915466309 ], [ "▁Goose", -13.391283988952637 ], [ "gänz", -13.391855239868164 ], [ "▁Blätter", -13.391901969909668 ], [ "▁jurnalist", -13.392212867736816 ], [ "cedat", -13.392263412475586 ], [ "nommée", -13.392315864562988 ], [ "écrivain", -13.392572402954102 ], [ "▁epoxy", -13.392577171325684 ], [ "▁verlangt", -13.392590522766113 ], [ "Störung", -13.392708778381348 ], [ "▁Doyle", -13.392729759216309 ], [ "▁Philharmoni", -13.392844200134277 ], [ "▁déclare", -13.393044471740723 ], [ "effort", -13.393045425415039 ], [ "ström", -13.393118858337402 ], [ "▁cunoaşte", -13.393244743347168 ], [ "▁gigantic", -13.3932466506958 ], [ "któ", -13.393378257751465 ], [ "▁ilustr", -13.393529891967773 ], [ "▁frec", -13.39371109008789 ], [ "▁Syracuse", -13.393916130065918 ], [ "▁Einwilligung", -13.393917083740234 ], [ "▁miraculous", -13.393917083740234 ], [ "▁ökologisch", -13.393917083740234 ], [ "▁Simmons", -13.393922805786133 ], [ "▁albastru", -13.393926620483398 ], [ "besser", -13.393962860107422 ], [ "▁interioare", -13.394006729125977 ], [ "▁Trocken", -13.394068717956543 ], [ "niveau", -13.39406967163086 ], [ "▁Torah", -13.394122123718262 ], [ "▁beobachten", -13.3945894241333 ], [ "▁behandeln", -13.394637107849121 ], [ "staffed", -13.394742965698242 ], [ "hütte", -13.394824028015137 ], [ "Central", -13.394939422607422 ], [ "▁Freiburg", -13.395198822021484 ], [ "▁Netanyahu", -13.395261764526367 ], [ "▁Lexington", -13.395302772521973 ], [ "▁insotit", -13.395492553710938 ], [ "▁depasi", -13.39560604095459 ], [ "sewage", -13.395853996276855 ], [ "erkrankung", -13.395951271057129 ], [ "▁părţi", -13.396234512329102 ], [ "▁Nixon", -13.39661693572998 ], [ "Byron", -13.396905899047852 ], [ "▁varietat", 
-13.39724063873291 ], [ "▁Bildschirm", -13.397299766540527 ], [ "▁accompli", -13.397424697875977 ], [ "affirmed", -13.397525787353516 ], [ "▁phyto", -13.397533416748047 ], [ "sectiune", -13.397592544555664 ], [ "abteilung", -13.397932052612305 ], [ "▁voastre", -13.397957801818848 ], [ "GitHub", -13.397958755493164 ], [ "▁Jorge", -13.39796257019043 ], [ "ACTION", -13.397972106933594 ], [ "voastra", -13.397984504699707 ], [ "▁Peanut", -13.397987365722656 ], [ "▁bilingual", -13.398011207580566 ], [ "▁nourriture", -13.39803695678711 ], [ "▁Asphalt", -13.398640632629395 ], [ "emballage", -13.399310111999512 ], [ "▁sanitation", -13.399310111999512 ], [ "▁Dessert", -13.399313926696777 ], [ "intitulé", -13.399322509765625 ], [ "▁acţiune", -13.399374008178711 ], [ "▁Übersetzung", -13.399402618408203 ], [ "destinate", -13.39941692352295 ], [ "▁Goddess", -13.399504661560059 ], [ "poziție", -13.399576187133789 ], [ "denumirea", -13.400002479553223 ], [ "cantitatea", -13.40002727508545 ], [ "▁Stereo", -13.400223731994629 ], [ "object", -13.400373458862305 ], [ "▁décè", -13.40058708190918 ], [ "▁Handeln", -13.400665283203125 ], [ "▁ambience", -13.400697708129883 ], [ "▁Lindsay", -13.4006986618042 ], [ "▁tensiune", -13.400781631469727 ], [ "▁thrift", -13.400788307189941 ], [ "▁Optimiz", -13.400843620300293 ], [ "▁beantworten", -13.401338577270508 ], [ "▁magistrat", -13.401342391967773 ], [ "évidence", -13.402016639709473 ], [ "▁Eclipse", -13.402016639709473 ], [ "▁Ribbon", -13.402016639709473 ], [ "▁condensation", -13.402016639709473 ], [ "▁innocence", -13.402018547058105 ], [ "▁mascara", -13.402023315429688 ], [ "▁seventeen", -13.402290344238281 ], [ "▁compétent", -13.402694702148438 ], [ "bewertet", -13.402717590332031 ], [ "▁Muzic", -13.40285587310791 ], [ "complexities", -13.402928352355957 ], [ "ddington", -13.403324127197266 ], [ "Entwickler", -13.403372764587402 ], [ "masonry", -13.4033784866333 ], [ "Führer", -13.403386116027832 ], [ "▁awakening", -13.403388977050781 ], [ "▁lovitur", -13.403806686401367 ], [ "gebrochen", -13.404068946838379 ], [ "indexed", -13.404478073120117 ], [ "campania", -13.404515266418457 ], [ "▁Fountain", -13.404730796813965 ], [ "▁Joomla", -13.404730796813965 ], [ "▁Superintendent", -13.404730796813965 ], [ "▁Dahl", -13.404742240905762 ], [ "▁Benefici", -13.404863357543945 ], [ "optimiser", -13.404919624328613 ], [ "bursting", -13.405380249023438 ], [ "diplom", -13.405427932739258 ], [ "microsoft", -13.405621528625488 ], [ "▁correlate", -13.405776977539062 ], [ "▁arhitectura", -13.405848503112793 ], [ "▁lunette", -13.40611743927002 ], [ "Statistical", -13.406147003173828 ], [ "▁iarnă", -13.406201362609863 ], [ "▁importanț", -13.406932830810547 ], [ "sistence", -13.407366752624512 ], [ "associated", -13.407402992248535 ], [ "Occident", -13.407452583312988 ], [ "▁Heidelberg", -13.407452583312988 ], [ "▁acquaintance", -13.407452583312988 ], [ "Introducing", -13.407453536987305 ], [ "▁ripple", -13.407480239868164 ], [ "▁Childhood", -13.407563209533691 ], [ "drywall", -13.407577514648438 ], [ "Vreau", -13.40771770477295 ], [ "▁compétence", -13.407967567443848 ], [ "▁asteapta", -13.408135414123535 ], [ "▁duhovnic", -13.408135414123535 ], [ "▁învăţământ", -13.408141136169434 ], [ "encompassing", -13.40829849243164 ], [ "1997)", -13.408370018005371 ], [ "▁atractiv", -13.408515930175781 ], [ "Majoritatea", -13.408775329589844 ], [ "▁bungalow", -13.40881633758545 ], [ "▁Introduce", -13.408817291259766 ], [ "▁culprit", -13.408817291259766 ], [ "▁malheureusement", 
-13.408817291259766 ], [ "▁voudrai", -13.408817291259766 ], [ "Europäische", -13.408825874328613 ], [ "wunsch", -13.408880233764648 ], [ "▁înțeles", -13.408892631530762 ], [ "▁infestation", -13.40889835357666 ], [ "Bringing", -13.409186363220215 ], [ "▁Mehrheit", -13.409229278564453 ], [ "ски", -13.409456253051758 ], [ "▁procéder", -13.409499168395996 ], [ "grupului", -13.409504890441895 ], [ "▁dispoziti", -13.40964412689209 ], [ "▁snug", -13.409950256347656 ], [ "▁Afrika", -13.41018295288086 ], [ "▁Madagascar", -13.41018295288086 ], [ "Părinte", -13.410195350646973 ], [ "▁Clayton", -13.410223960876465 ], [ "▁antagonist", -13.410239219665527 ], [ "termeni", -13.410250663757324 ], [ "▁Literary", -13.410391807556152 ], [ "▁Babylon", -13.410452842712402 ], [ "▁überprüfen", -13.410865783691406 ], [ "▁duminica", -13.410879135131836 ], [ "farbig", -13.410970687866211 ], [ "nennt", -13.411064147949219 ], [ "annual", -13.411487579345703 ], [ "▁Qualcomm", -13.41154956817627 ], [ "▁Slovakia", -13.41154956817627 ], [ "▁plictis", -13.411552429199219 ], [ "▁prairie", -13.411554336547852 ], [ "▁Schatten", -13.411622047424316 ], [ "▁compléter", -13.41223430633545 ], [ "inauguration", -13.412376403808594 ], [ "▁apărare", -13.412407875061035 ], [ "▁întăr", -13.412412643432617 ], [ "▁pronunciation", -13.412919044494629 ], [ "▁bewährt", -13.412919998168945 ], [ "▁Viertel", -13.413084983825684 ], [ "▁Heidi", -13.413252830505371 ], [ "▁Gummi", -13.413507461547852 ], [ "▁veggie", -13.413552284240723 ], [ "▁monsieur", -13.413604736328125 ], [ "éveil", -13.413630485534668 ], [ "shipments", -13.413928985595703 ], [ "▁Medikamente", -13.414290428161621 ], [ "▁Johannesburg", -13.414314270019531 ], [ "▁ermittelt", -13.414321899414062 ], [ "▁bataille", -13.414440155029297 ], [ "extrem", -13.414609909057617 ], [ "▁1:2", -13.414671897888184 ], [ "Array", -13.414725303649902 ], [ "▁portail", -13.414857864379883 ], [ "▁găzdui", -13.414977073669434 ], [ "▁Calcium", -13.41497802734375 ], [ "▁Correction", -13.415104866027832 ], [ "bureaux", -13.41528034210205 ], [ "bestselling", -13.415338516235352 ], [ "Übungen", -13.415420532226562 ], [ "paramètres", -13.415633201599121 ], [ "▁Provincial", -13.415663719177246 ], [ "▁outrageous", -13.415680885314941 ], [ "▁Giveaway", -13.415775299072266 ], [ "▁LGBTQ", -13.41589641571045 ], [ "geklärt", -13.416854858398438 ], [ "▁Karlsruhe", -13.417038917541504 ], [ "▁esențial", -13.417038917541504 ], [ "avancée", -13.41703987121582 ], [ "hesitant", -13.417040824890137 ], [ "enlarged", -13.417069435119629 ], [ "▁inherit", -13.417121887207031 ], [ "Food", -13.4171724319458 ], [ "bucuria", -13.417181015014648 ], [ "▁BTW", -13.417400360107422 ], [ "associe", -13.417579650878906 ], [ "▁Möchte", -13.417742729187012 ], [ "demokrat", -13.417789459228516 ], [ "Turcia", -13.417964935302734 ], [ "forged", -13.418370246887207 ], [ "▁Zhao", -13.418442726135254 ], [ "▁cherries", -13.418556213378906 ], [ "▁evangelical", -13.418631553649902 ], [ "▁jüng", -13.418792724609375 ], [ "spans", -13.41880989074707 ], [ "▁străluc", -13.41888427734375 ], [ "▁geschie", -13.41893196105957 ], [ "▁Tattoo", -13.419112205505371 ], [ "sanitary", -13.419114112854004 ], [ "▁biopsy", -13.419353485107422 ], [ "▁imprumut", -13.419795036315918 ], [ "▁unreasonable", -13.419795036315918 ], [ "Funktion", -13.419800758361816 ], [ "▁prohibition", -13.419904708862305 ], [ "▁Prezent", -13.419939041137695 ], [ "boosted", -13.419967651367188 ], [ "▁chalet", -13.420382499694824 ], [ "▁tanar", -13.420450210571289 ], [ "Faktoren", 
-13.420489311218262 ], [ "▁Mozilla", -13.420550346374512 ], [ "▁Lambert", -13.420760154724121 ], [ "▁Cruci", -13.420927047729492 ], [ "▁Flugzeug", -13.421198844909668 ], [ "reassure", -13.421205520629883 ], [ "envisioned", -13.421542167663574 ], [ "Traditionally", -13.421773910522461 ], [ "▁parametri", -13.42185115814209 ], [ "▁unicorn", -13.421891212463379 ], [ "▁adéquat", -13.421894073486328 ], [ "▁Colonial", -13.421915054321289 ], [ "▁Kwa", -13.422097206115723 ], [ "▁SERV", -13.422333717346191 ], [ "tourism", -13.422627449035645 ], [ "▁Kiev", -13.422974586486816 ], [ "heightened", -13.42309284210205 ], [ "circulating", -13.423099517822266 ], [ "▁Kreditkarte", -13.42310619354248 ], [ "gedruckt", -13.423110008239746 ], [ "▁Depend", -13.423120498657227 ], [ "Style", -13.423196792602539 ], [ "▁Rettungs", -13.42325496673584 ], [ "wrongful", -13.423418998718262 ], [ "▁devour", -13.423453330993652 ], [ "▁manevr", -13.423582077026367 ], [ "carora", -13.423628807067871 ], [ "erfolgreichen", -13.423723220825195 ], [ "überwiegend", -13.423942565917969 ], [ "▁Sauvignon", -13.423942565917969 ], [ "händler", -13.423944473266602 ], [ "▁annotation", -13.424009323120117 ], [ "▁expans", -13.424020767211914 ], [ "▁recital", -13.424080848693848 ], [ "inhabited", -13.424367904663086 ], [ "OnePlus", -13.424549102783203 ], [ "Gästen", -13.424588203430176 ], [ "beliebig", -13.424613952636719 ], [ "▁Anonymous", -13.424635887145996 ], [ "▁Ansprechpartner", -13.424635887145996 ], [ "▁tamb", -13.42464542388916 ], [ "estimating", -13.424670219421387 ], [ "frequent", -13.424769401550293 ], [ "▁disciplin", -13.425241470336914 ], [ "▁plombier", -13.425329208374023 ], [ "▁teoretic", -13.42533016204834 ], [ "greift", -13.425339698791504 ], [ "▁Einschränkung", -13.42537784576416 ], [ "obscur", -13.426115989685059 ], [ "architecte", -13.426233291625977 ], [ "▁détour", -13.42647647857666 ], [ "▁spaghetti", -13.426717758178711 ], [ "croft", -13.42693042755127 ], [ "▁Grammar", -13.426953315734863 ], [ "▁investitii", -13.427062034606934 ], [ "▁glorif", -13.427067756652832 ], [ "architekt", -13.427412033081055 ], [ "Oricum", -13.427451133728027 ], [ "▁bruise", -13.427692413330078 ], [ "▁McCarthy", -13.428107261657715 ], [ "▁Uruguay", -13.428107261657715 ], [ "Produsele", -13.428109169006348 ], [ "▁Comparison", -13.42811107635498 ], [ "▁fondamental", -13.42811107635498 ], [ "▁stradă", -13.428115844726562 ], [ "▁Countries", -13.428131103515625 ], [ "▁guéri", -13.42825698852539 ], [ "▁bâti", -13.428339004516602 ], [ "▁blunt", -13.428515434265137 ], [ "▁Sistem", -13.428645133972168 ], [ "▁Betroffenen", -13.428803443908691 ], [ "efectuare", -13.428823471069336 ], [ "▁scharf", -13.428899765014648 ], [ "naps", -13.429057121276855 ], [ "▁plaid", -13.429163932800293 ], [ "▁investiții", -13.429367065429688 ], [ "evenimentele", -13.42948055267334 ], [ "▁Phuket", -13.429499626159668 ], [ "▁testosterone", -13.429499626159668 ], [ "▁scaffold", -13.429500579833984 ], [ "▁rasch", -13.430022239685059 ], [ "▁adânc", -13.430076599121094 ], [ "atteinte", -13.430228233337402 ], [ "▁educație", -13.430320739746094 ], [ "▁leopard", -13.430893898010254 ], [ "▁superioare", -13.430893898010254 ], [ "▁téléchargement", -13.430893898010254 ], [ "▁Weapon", -13.431103706359863 ], [ "favourable", -13.431336402893066 ], [ "nourishing", -13.43143367767334 ], [ "▁verfolgt", -13.43160629272461 ], [ "▁tablou", -13.431633949279785 ], [ "Algérie", -13.431657791137695 ], [ "Islam", -13.431700706481934 ], [ "faser", -13.431825637817383 ], [ "rhythm", 
-13.432214736938477 ], [ "▁Anthropolog", -13.432291030883789 ], [ "▁clôtur", -13.432291030883789 ], [ "spüren", -13.432291984558105 ], [ "▁Architectural", -13.432294845581055 ], [ "▁imaginary", -13.432368278503418 ], [ "cône", -13.432456016540527 ], [ "▁snuggl", -13.432744026184082 ], [ "disadvantaged", -13.432745933532715 ], [ "radically", -13.4329195022583 ], [ "Première", -13.433011054992676 ], [ "▁combinaison", -13.433027267456055 ], [ "▁Algeria", -13.43303108215332 ], [ "▁Wände", -13.43317985534668 ], [ "aesthetically", -13.43336009979248 ], [ "▁McKe", -13.433368682861328 ], [ "interroge", -13.433473587036133 ], [ "exclusive", -13.433475494384766 ], [ "▁Thomson", -13.433688163757324 ], [ "▁Gujarat", -13.43368911743164 ], [ "irgendwo", -13.433690071105957 ], [ "Severin", -13.433767318725586 ], [ "▁imitation", -13.433926582336426 ], [ "constructed", -13.434194564819336 ], [ "▁Montpellier", -13.434388160705566 ], [ "cedent", -13.434539794921875 ], [ "accelerating", -13.434563636779785 ], [ "dommages", -13.4346284866333 ], [ "lideri", -13.434730529785156 ], [ "▁Millennium", -13.435089111328125 ], [ "▁imprisonment", -13.435089111328125 ], [ "machining", -13.435111999511719 ], [ "▁anxiet", -13.43521499633789 ], [ "Contains", -13.435298919677734 ], [ "pleade", -13.435563087463379 ], [ "DOWN", -13.43564510345459 ], [ "geschehen", -13.435797691345215 ], [ "restaurant", -13.435811996459961 ], [ "Totusi", -13.435839653015137 ], [ "amintesc", -13.436158180236816 ], [ "▁Crisp", -13.436233520507812 ], [ "aduse", -13.436278343200684 ], [ "▁imposé", -13.436351776123047 ], [ "Jubiläum", -13.436490058898926 ], [ "▁Plaintiff", -13.436491012573242 ], [ "▁authoritative", -13.436491966247559 ], [ "▁rendition", -13.436633110046387 ], [ "Royce", -13.436707496643066 ], [ "1996)", -13.436724662780762 ], [ "Asociația", -13.437192916870117 ], [ "▁Gluten", -13.437264442443848 ], [ "feature", -13.43741226196289 ], [ "Behavioral", -13.437454223632812 ], [ "tearing", -13.437763214111328 ], [ "▁Entfernung", -13.437894821166992 ], [ "▁Responsibility", -13.437894821166992 ], [ "▁negligent", -13.437894821166992 ], [ "▁syllabus", -13.437894821166992 ], [ "▁Cycling", -13.437895774841309 ], [ "generell", -13.438114166259766 ], [ "customised", -13.438392639160156 ], [ "Management", -13.43850326538086 ], [ "▁timid", -13.438518524169922 ], [ "Tagged", -13.438730239868164 ], [ "▁susţinut", -13.438809394836426 ], [ "anchored", -13.43892765045166 ], [ "alternating", -13.439055442810059 ], [ "▁obligatoriu", -13.439300537109375 ], [ "▁reinstate", -13.439456939697266 ], [ "Können", -13.43946361541748 ], [ "▁Paol", -13.439596176147461 ], [ "öhr", -13.439603805541992 ], [ "▁Asociati", -13.439876556396484 ], [ "▁commenc", -13.440285682678223 ], [ "reinigt", -13.440293312072754 ], [ "commended", -13.440350532531738 ], [ "▁Proceed", -13.440675735473633 ], [ "beutel", -13.440702438354492 ], [ "▁Experimental", -13.44070816040039 ], [ "▁constellation", -13.44070816040039 ], [ "▁gepflegt", -13.44070816040039 ], [ "▁Ergänzung", -13.440709114074707 ], [ "Judith", -13.440713882446289 ], [ "▁Quartet", -13.440720558166504 ], [ "complemented", -13.440742492675781 ], [ "ausbildung", -13.440750122070312 ], [ "▁uncertainties", -13.44077205657959 ], [ "▁humiliat", -13.440914154052734 ], [ "luta", -13.441121101379395 ], [ "▁complexion", -13.441482543945312 ], [ "Serviciul", -13.441612243652344 ], [ "▁Toast", -13.441722869873047 ], [ "ummies", -13.442425727844238 ], [ "▁irit", -13.442463874816895 ], [ "producing", -13.442585945129395 ], [ "amenajare", 
-13.442825317382812 ], [ "▁béton", -13.442828178405762 ], [ "▁serpent", -13.442851066589355 ], [ "▁vizită", -13.442996978759766 ], [ "▁Beamte", -13.443017959594727 ], [ "▁Füße", -13.443166732788086 ], [ "▁Norwich", -13.443531036376953 ], [ "▁acronym", -13.443531036376953 ], [ "▁eradicate", -13.443531036376953 ], [ "▁solidarité", -13.44353199005127 ], [ "▁eggplant", -13.443582534790039 ], [ "▁sailors", -13.443619728088379 ], [ "waschen", -13.444538116455078 ], [ "Editura", -13.444757461547852 ], [ "▁erwerben", -13.444944381713867 ], [ "▁unconventional", -13.444944381713867 ], [ "▁boulder", -13.444948196411133 ], [ "Diplom", -13.445013046264648 ], [ "influx", -13.446162223815918 ], [ "▁Twelve", -13.446361541748047 ], [ "▁Sexual", -13.44636344909668 ], [ "numite", -13.446369171142578 ], [ "▁kontaktieren", -13.446370124816895 ], [ "▁strâns", -13.44637680053711 ], [ "▁précisément", -13.446382522583008 ], [ "empfindlich", -13.446405410766602 ], [ "▁divulg", -13.446490287780762 ], [ "▁delicat", -13.446539878845215 ], [ "compete", -13.446542739868164 ], [ "▁implique", -13.446616172790527 ], [ "implantation", -13.44672966003418 ], [ "frères", -13.447328567504883 ], [ "shedding", -13.44758415222168 ], [ "découvrez", -13.447657585144043 ], [ "rith", -13.447735786437988 ], [ "▁réglementation", -13.447778701782227 ], [ "▁transistor", -13.447785377502441 ], [ "inflated", -13.447792053222656 ], [ "▁Bluff", -13.447887420654297 ], [ "▁Aquarium", -13.448526382446289 ], [ "▁mananc", -13.448638916015625 ], [ "▁disinfect", -13.448700904846191 ], [ "tuft", -13.448740005493164 ], [ "Public", -13.449081420898438 ], [ "conceivabl", -13.449197769165039 ], [ "▁Cadillac", -13.449197769165039 ], [ "Assassin", -13.449199676513672 ], [ "issuance", -13.449252128601074 ], [ "▁Achtung", -13.449287414550781 ], [ "▁grundlegend", -13.449909210205078 ], [ "▁Băsescu", -13.449910163879395 ], [ "schaden", -13.45014476776123 ], [ "coached", -13.450409889221191 ], [ "▁betreffend", -13.45046329498291 ], [ "ergebnis", -13.450541496276855 ], [ "▁Lieutenant", -13.4506196975708 ], [ "WORLD", -13.450620651245117 ], [ "▁Moroccan", -13.450620651245117 ], [ "▁Butterfly", -13.450621604919434 ], [ "would", -13.450737953186035 ], [ "▁Metropol", -13.451025009155273 ], [ "lexic", -13.451192855834961 ], [ "comunitatea", -13.45124340057373 ], [ "vapeur", -13.451456069946289 ], [ "4.000", -13.451559066772461 ], [ "Pentru", -13.451581954956055 ], [ "üblichen", -13.451613426208496 ], [ "▁Général", -13.451770782470703 ], [ "▁Versailles", -13.452046394348145 ], [ "▁engraving", -13.452046394348145 ], [ "▁pédagogique", -13.452192306518555 ], [ "▁Policies", -13.452759742736816 ], [ "descending", -13.453235626220703 ], [ "stärkt", -13.453349113464355 ], [ "▁démocratie", -13.453470230102539 ], [ "▁granddaughter", -13.453470230102539 ], [ "▁buffalo", -13.453474998474121 ], [ "Datorita", -13.45347785949707 ], [ "hydroxy", -13.453537940979004 ], [ "▁ganduri", -13.453566551208496 ], [ "▁hijack", -13.453624725341797 ], [ "zahn", -13.453699111938477 ], [ "poziția", -13.45406436920166 ], [ "▁Zähne", -13.454184532165527 ], [ "▁grossesse", -13.454296112060547 ], [ "embassy", -13.4548978805542 ], [ "▁cérémonie", -13.4548978805542 ], [ "Rhône", -13.454898834228516 ], [ "▁Cabernet", -13.454898834228516 ], [ "▁Namibia", -13.454902648925781 ], [ "▁pedestal", -13.454902648925781 ], [ "▁Fighting", -13.45490550994873 ], [ "▁Threat", -13.454962730407715 ], [ "▁ideological", -13.455047607421875 ], [ "▁restitu", -13.455183029174805 ], [ "gelangt", -13.455510139465332 ], [ 
"Mitgliedern", -13.455537796020508 ], [ "acquérir", -13.455613136291504 ], [ "▁inferioar", -13.45561695098877 ], [ "Thierry", -13.455619812011719 ], [ "▁Entspannung", -13.455638885498047 ], [ "frequency", -13.45566177368164 ], [ "▁Fluid", -13.455686569213867 ], [ "▁betreut", -13.455901145935059 ], [ "Biological", -13.455965995788574 ], [ "▁Constanţa", -13.456328392028809 ], [ "▁beschäftigen", -13.456328392028809 ], [ "▁undesirable", -13.456328392028809 ], [ "▁protégé", -13.456365585327148 ], [ "▁nautical", -13.456474304199219 ], [ "▁sniff", -13.456507682800293 ], [ "Decizi", -13.456510543823242 ], [ "▁căldur", -13.45706558227539 ], [ "▁ideologi", -13.457335472106934 ], [ "Fraktion", -13.457545280456543 ], [ "collegiate", -13.45776081085205 ], [ "▁sănătos", -13.45776081085205 ], [ "▁Observatory", -13.45776653289795 ], [ "▁saturation", -13.457769393920898 ], [ "organizate", -13.457771301269531 ], [ "mergem", -13.458321571350098 ], [ "Publish", -13.458451271057129 ], [ "▁rattle", -13.458460807800293 ], [ "▁întâlniri", -13.458663940429688 ], [ "emporte", -13.458741188049316 ], [ "▁înscris", -13.459046363830566 ], [ "▁Patterson", -13.459195137023926 ], [ "▁ehrenamtlich", -13.459195137023926 ], [ "linux", -13.459213256835938 ], [ "conduire", -13.45921802520752 ], [ "▁absolven", -13.459223747253418 ], [ "▁einzigartig", -13.459598541259766 ], [ "▁_____", -13.459803581237793 ], [ "▁Beschäftigung", -13.459912300109863 ], [ "▁erfasst", -13.459927558898926 ], [ "▁Datum", -13.459992408752441 ], [ "raportul", -13.460284233093262 ], [ "ennemi", -13.460460662841797 ], [ "default", -13.460643768310547 ], [ "icillin", -13.46066951751709 ], [ "▁diamant", -13.460671424865723 ], [ "amerika", -13.460684776306152 ], [ "▁pescuit", -13.46070384979248 ], [ "▁grappl", -13.460797309875488 ], [ "▁Homeland", -13.46082592010498 ], [ "▁tromb", -13.46112060546875 ], [ "▁reduzieren", -13.461349487304688 ], [ "▁Statut", -13.461593627929688 ], [ "booming", -13.461670875549316 ], [ "fenced", -13.461723327636719 ], [ "measure", -13.461888313293457 ], [ "témoin", -13.462069511413574 ], [ "▁Inventory", -13.462069511413574 ], [ "▁circonstance", -13.462069511413574 ], [ "▁téléphonique", -13.462069511413574 ], [ "▁împiedic", -13.46207046508789 ], [ "▁Settlement", -13.462072372436523 ], [ "kannte", -13.462076187133789 ], [ "▁substantive", -13.462385177612305 ], [ "miterea", -13.462642669677734 ], [ "▁noştri", -13.462790489196777 ], [ "▁plăcere", -13.462791442871094 ], [ "▁eticheta", -13.462823867797852 ], [ "quickest", -13.462993621826172 ], [ "▁pasageri", -13.463089942932129 ], [ "▁Publi", -13.463495254516602 ], [ "▁Suzanne", -13.463509559631348 ], [ "▁bucătări", -13.463509559631348 ], [ "Regulatory", -13.463510513305664 ], [ "▁Mandarin", -13.463647842407227 ], [ "surgical", -13.463947296142578 ], [ "▁Smash", -13.463950157165527 ], [ "▁mândr", -13.46403694152832 ], [ "▁Unterkunft", -13.464315414428711 ], [ "moos", -13.464374542236328 ], [ "Camere", -13.464510917663574 ], [ "/03/", -13.464651107788086 ], [ "▁ethno", -13.464677810668945 ], [ "▁Eröffnung", -13.46495246887207 ], [ "▁Snyder", -13.46495246887207 ], [ "▁Wilmington", -13.46495246887207 ], [ "▁Canberra", -13.464953422546387 ], [ "▁Tahoe", -13.464953422546387 ], [ "▁slippery", -13.464953422546387 ], [ "▁Snake", -13.464957237243652 ], [ "▁turmeric", -13.464963912963867 ], [ "▁Cartoon", -13.46499252319336 ], [ "▁scrisoare", -13.46500015258789 ], [ "▁reprend", -13.465425491333008 ], [ "▁Konkurrenz", -13.46567440032959 ], [ "▁raisins", -13.465693473815918 ], [ "▁Werkstatt", 
-13.465713500976562 ], [ "▁agresiv", -13.465795516967773 ], [ "hugs", -13.46615219116211 ], [ "cazurile", -13.46618938446045 ], [ "spirited", -13.466232299804688 ], [ "▁britisch", -13.466307640075684 ], [ "spritz", -13.466367721557617 ], [ "auxiliary", -13.46639633178711 ], [ "interprétation", -13.46639633178711 ], [ "▁verbindet", -13.46639633178711 ], [ "▁fuzzy", -13.466429710388184 ], [ "▁turmoil", -13.466432571411133 ], [ "▁redefine", -13.466819763183594 ], [ "▁Kiwi", -13.466890335083008 ], [ "oiseaux", -13.46712875366211 ], [ "▁pamper", -13.467146873474121 ], [ "▁desfaso", -13.46719741821289 ], [ "▁pragu", -13.467576026916504 ], [ "prevenirea", -13.467730522155762 ], [ "▁convergence", -13.467846870422363 ], [ "tufted", -13.467878341674805 ], [ "brewed", -13.467981338500977 ], [ "villagers", -13.468003273010254 ], [ "▁Irving", -13.468170166015625 ], [ "nigsten", -13.468660354614258 ], [ "▁embod", -13.468742370605469 ], [ "Alicia", -13.468938827514648 ], [ "probably", -13.469009399414062 ], [ "divider", -13.46904468536377 ], [ "Attempt", -13.469223022460938 ], [ "▁Cognitive", -13.469292640686035 ], [ "▁Recognition", -13.469292640686035 ], [ "▁concierge", -13.469292640686035 ], [ "▁Semester", -13.4692964553833 ], [ "Economie", -13.469417572021484 ], [ "sortiment", -13.469460487365723 ], [ "shortest", -13.46961498260498 ], [ "üchtig", -13.469650268554688 ], [ "▁conveyanc", -13.469978332519531 ], [ "▁Ferdinand", -13.470017433166504 ], [ "▁permanence", -13.470019340515137 ], [ "▁incadr", -13.470145225524902 ], [ "▁estrogen", -13.470290184020996 ], [ "February", -13.470661163330078 ], [ "gedeckt", -13.470704078674316 ], [ "▁reagieren", -13.470743179321289 ], [ "▁meditate", -13.470980644226074 ], [ "simulated", -13.471010208129883 ], [ "▁supprimer", -13.471468925476074 ], [ "▁bumbac", -13.47146987915039 ], [ "▁vânzări", -13.471477508544922 ], [ "▁Kapitel", -13.471478462219238 ], [ "▁Weltkrieg", -13.471513748168945 ], [ "déposer", -13.471674919128418 ], [ "Asus", -13.4718017578125 ], [ "▁Communicat", -13.471851348876953 ], [ "Finished", -13.47188949584961 ], [ "▁Telegraph", -13.472054481506348 ], [ "▁Competitive", -13.472196578979492 ], [ "▁collectivités", -13.472197532653809 ], [ "▁protège", -13.472199440002441 ], [ "▁scallop", -13.472219467163086 ], [ "Happy", -13.472335815429688 ], [ "tehnică", -13.472352981567383 ], [ "▁Gestalt", -13.47270393371582 ], [ "▁benign", -13.47295093536377 ], [ "kraut", -13.473149299621582 ], [ "louer", -13.473221778869629 ], [ "▁Printr", -13.47326946258545 ], [ "mputation", -13.473346710205078 ], [ "▁dicke", -13.473429679870605 ], [ "▁Halifax", -13.473650932312012 ], [ "▁bounty", -13.473650932312012 ], [ "▁cauliflower", -13.473650932312012 ], [ "▁Survival", -13.473654747009277 ], [ "▁Chandler", -13.473684310913086 ], [ "▁bemüh", -13.473760604858398 ], [ "phro", -13.473855972290039 ], [ "Friday", -13.474018096923828 ], [ "particularly", -13.474032402038574 ], [ "arteries", -13.474197387695312 ], [ "Lösung", -13.474771499633789 ], [ "▁causal", -13.474817276000977 ], [ "▁recueilli", -13.475075721740723 ], [ "Stylish", -13.47510814666748 ], [ "schränke", -13.47510814666748 ], [ "▁francophone", -13.47510814666748 ], [ "▁limousine", -13.47510814666748 ], [ "▁statistiques", -13.47510814666748 ], [ "▁Kleider", -13.475111961364746 ], [ "▁dunkel", -13.475127220153809 ], [ "tätigkeit", -13.475190162658691 ], [ "▁punished", -13.475257873535156 ], [ "▁implică", -13.475539207458496 ], [ "▁inițial", -13.475568771362305 ], [ "▁Eminescu", -13.475837707519531 ], [ "▁expliqué", 
-13.475837707519531 ], [ "▁Eduard", -13.475839614868164 ], [ "▁psychologique", -13.475870132446289 ], [ "▁protejeaz", -13.476580619812012 ], [ "spül", -13.476709365844727 ], [ "▁Virtu", -13.477021217346191 ], [ "▁régulière", -13.477044105529785 ], [ "▁Outreach", -13.477130889892578 ], [ "▁Apprentice", -13.47729778289795 ], [ "▁compréhension", -13.47729778289795 ], [ "▁zwölf", -13.47729778289795 ], [ "Surgical", -13.477315902709961 ], [ "latéral", -13.477417945861816 ], [ "▁Ceremony", -13.47803020477295 ], [ "▁Shampoo", -13.47803783416748 ], [ "Global", -13.478239059448242 ], [ "▁paradis", -13.478302955627441 ], [ "Developed", -13.478493690490723 ], [ "▁figurine", -13.478549003601074 ], [ "sujets", -13.478574752807617 ], [ "▁Naomi", -13.478772163391113 ], [ "financed", -13.478838920593262 ], [ "forestry", -13.478896141052246 ], [ "▁Anregung", -13.479494094848633 ], [ "▁spectateur", -13.479804039001465 ], [ "▁exercitii", -13.479815483093262 ], [ "▁russisch", -13.479888916015625 ], [ "gefunden", -13.479988098144531 ], [ "schleunig", -13.480225563049316 ], [ "▁géographique", -13.480225563049316 ], [ "▁Delphi", -13.480317115783691 ], [ "Freddie", -13.4806489944458 ], [ "▁muzici", -13.480958938598633 ], [ "▁Edmund", -13.48095989227295 ], [ "finanzielle", -13.481032371520996 ], [ "(2003)", -13.481319427490234 ], [ "accentuate", -13.481437683105469 ], [ "overlapping", -13.48151969909668 ], [ "▁Pluto", -13.481595993041992 ], [ "românii", -13.481683731079102 ], [ "▁Timişoara", -13.48169231414795 ], [ "▁poivr", -13.481754302978516 ], [ "▁repris", -13.481852531433105 ], [ "▁Geschlecht", -13.482426643371582 ], [ "▁thieves", -13.482426643371582 ], [ "▁Transformer", -13.482431411743164 ], [ "▁shortcomings", -13.482438087463379 ], [ "▁aptitude", -13.48244571685791 ], [ "pitfalls", -13.482468605041504 ], [ "▁manicure", -13.482577323913574 ], [ "mystical", -13.482723236083984 ], [ "▁abolish", -13.482833862304688 ], [ "▁Zielgruppe", -13.482873916625977 ], [ "▁naţionale", -13.483160972595215 ], [ "▁trandafir", -13.483160972595215 ], [ "▁matematic", -13.483193397521973 ], [ "▁Hirsch", -13.483257293701172 ], [ "Fahr", -13.483458518981934 ], [ "connaissent", -13.483476638793945 ], [ "browned", -13.483846664428711 ], [ "▁bearbeitet", -13.483881950378418 ], [ "▁usturoi", -13.483896255493164 ], [ "▁Surprise", -13.48389720916748 ], [ "▁Tehran", -13.483899116516113 ], [ "▁BLACK", -13.483901023864746 ], [ "▁abonament", -13.483904838562012 ], [ "▁mêl", -13.483972549438477 ], [ "Angebot", -13.484091758728027 ], [ "ajungi", -13.48410415649414 ], [ "▁Woodland", -13.48420524597168 ], [ "▁gradini", -13.484305381774902 ], [ "▁Marilyn", -13.48464584350586 ], [ "kilometer", -13.484880447387695 ], [ "tempered", -13.485230445861816 ], [ "▁intimacy", -13.485371589660645 ], [ "▁thunderstorm", -13.485373497009277 ], [ "▁Uttar", -13.485413551330566 ], [ "▁varnish", -13.485535621643066 ], [ "opathie", -13.485982894897461 ], [ "▁școlar", -13.48611068725586 ], [ "▁raisonnable", -13.486114501953125 ], [ "proactively", -13.486490249633789 ], [ "▁gib", -13.486536979675293 ], [ "▁hospice", -13.48684310913086 ], [ "▁constă", -13.486896514892578 ], [ "▁Crescent", -13.48690128326416 ], [ "▁ambasad", -13.486933708190918 ], [ "hotărâre", -13.486969947814941 ], [ "▁fraîche", -13.48709774017334 ], [ "▁bundesweit", -13.487581253051758 ], [ "nsbesondere", -13.487812042236328 ], [ "▁intoarce", -13.487863540649414 ], [ "▁Schokolade", -13.488319396972656 ], [ "▁adjective", -13.488319396972656 ], [ "▁incalzire", -13.488319396972656 ], [ 
"▁Qualification", -13.488320350646973 ], [ "▁Bolivia", -13.488324165344238 ], [ "▁cruelty", -13.488334655761719 ], [ "pläne", -13.48834228515625 ], [ "▁solitude", -13.488354682922363 ], [ "▁Bosnia", -13.488568305969238 ], [ "rohr", -13.488643646240234 ], [ "▁regrette", -13.48877239227295 ], [ "zusammengestellt", -13.48924732208252 ], [ "▁Kardashian", -13.489798545837402 ], [ "▁Picasso", -13.489798545837402 ], [ "▁unverbindlich", -13.489798545837402 ], [ "▁Headquarters", -13.489799499511719 ], [ "métrage", -13.4898099899292 ], [ "▁Magento", -13.489816665649414 ], [ "▁exhibitors", -13.489898681640625 ], [ "utty", -13.490381240844727 ], [ "▁Fünf", -13.490538597106934 ], [ "▁Peugeot", -13.490538597106934 ], [ "▁verdienen", -13.490538597106934 ], [ "▁absolviert", -13.49053955078125 ], [ "schutzerklärung", -13.490679740905762 ], [ "sistemele", -13.49089241027832 ], [ "▁concrète", -13.491279602050781 ], [ "▁rhyme", -13.491279602050781 ], [ "▁Continuous", -13.49128246307373 ], [ "versprechen", -13.491312026977539 ], [ "▁Melanie", -13.49202823638916 ], [ "▁clienţi", -13.492046356201172 ], [ "luckily", -13.492205619812012 ], [ "▁counterfeit", -13.492762565612793 ], [ "▁locomotive", -13.492889404296875 ], [ "▁reacți", -13.492908477783203 ], [ "ampered", -13.493005752563477 ], [ "atenția", -13.493011474609375 ], [ "Suppose", -13.493062973022461 ], [ "hinweis", -13.493464469909668 ], [ "verletzung", -13.493504524230957 ], [ "▁mănânc", -13.493504524230957 ], [ "▁provoac", -13.493507385253906 ], [ "▁regizor", -13.493511199951172 ], [ "kundig", -13.49352741241455 ], [ "embarqu", -13.493584632873535 ], [ "Radio", -13.493690490722656 ], [ "Ministrul", -13.493896484375 ], [ "weakened", -13.494214057922363 ], [ "▁translucent", -13.494247436523438 ], [ "George", -13.494380950927734 ], [ "▁bacterii", -13.494402885437012 ], [ "intervalul", -13.494803428649902 ], [ "▁vizualiz", -13.494832038879395 ], [ "▁Feuchtigkeit", -13.494991302490234 ], [ "▁choisissez", -13.494991302490234 ], [ "▁plausible", -13.494991302490234 ], [ "▁perpetu", -13.495122909545898 ], [ "▁bucati", -13.495194435119629 ], [ "▁Giovanni", -13.495735168457031 ], [ "▁bluetooth", -13.495736122131348 ], [ "▁translating", -13.49573802947998 ], [ "▁Kyoto", -13.495739936828613 ], [ "▁homosexual", -13.495745658874512 ], [ "treabă", -13.495820045471191 ], [ "ntrepid", -13.495983123779297 ], [ "▁fachlich", -13.496664047241211 ], [ "Vaccin", -13.496774673461914 ], [ "▁Treib", -13.497248649597168 ], [ "varsity", -13.497272491455078 ], [ "▁Tavern", -13.497278213500977 ], [ "▁ensue", -13.497330665588379 ], [ "flexibel", -13.497971534729004 ], [ "retrieved", -13.498102188110352 ], [ "traditionellen", -13.498230934143066 ], [ "▁circulati", -13.498546600341797 ], [ "▁Diagnose", -13.498717308044434 ], [ "▁Strawberry", -13.498717308044434 ], [ "Societatea", -13.49871826171875 ], [ "expertise", -13.498849868774414 ], [ "▁naturii", -13.499464988708496 ], [ "▁4:1", -13.499515533447266 ], [ "Frequently", -13.500210762023926 ], [ "disproportionate", -13.500210762023926 ], [ "▁LIMITED", -13.500210762023926 ], [ "▁ancestral", -13.500227928161621 ], [ "▁Logistik", -13.500237464904785 ], [ "▁recolt", -13.50042724609375 ], [ "▁liebevoll", -13.500436782836914 ], [ "importing", -13.500452041625977 ], [ "aparatul", -13.500458717346191 ], [ "poziţia", -13.500564575195312 ], [ "facerilor", -13.500658988952637 ], [ "Submitted", -13.50086784362793 ], [ "ografia", -13.501221656799316 ], [ "onformément", -13.50168228149414 ], [ "▁dissemination", -13.501708030700684 ], [ "afli", 
-13.501834869384766 ], [ "luminous", -13.502154350280762 ], [ "▁draußen", -13.502456665039062 ], [ "▁Zauber", -13.502535820007324 ], [ "▁Ibrahim", -13.503207206726074 ], [ "▁eruption", -13.503216743469238 ], [ "écrite", -13.50357723236084 ], [ "avril", -13.503898620605469 ], [ "Increasing", -13.504171371459961 ], [ "hingeg", -13.504411697387695 ], [ "fidelity", -13.504707336425781 ], [ "étonnant", -13.504707336425781 ], [ "▁créativité", -13.504707336425781 ], [ "▁Required", -13.504708290100098 ], [ "▁Edison", -13.504719734191895 ], [ "▁Stuhl", -13.504719734191895 ], [ "outhwestern", -13.506060600280762 ], [ "▁Beschwerden", -13.506210327148438 ], [ "▁angajaţi", -13.506210327148438 ], [ "▁Currency", -13.506211280822754 ], [ "▁reagiert", -13.506214141845703 ], [ "Science", -13.506229400634766 ], [ "hospital", -13.506253242492676 ], [ "professionellen", -13.50649356842041 ], [ "▁Trouve", -13.506768226623535 ], [ "▁utopi", -13.50683307647705 ], [ "gypte", -13.506928443908691 ], [ "▁Konsequenz", -13.506962776184082 ], [ "▁pacienți", -13.506962776184082 ], [ "▁orizont", -13.506988525390625 ], [ "Corey", -13.506999015808105 ], [ "▁quartet", -13.507009506225586 ], [ "▁Sherlock", -13.50710678100586 ], [ "▁gagné", -13.507237434387207 ], [ "▁Jusqu", -13.50732707977295 ], [ "▁Clickfunnel", -13.507465362548828 ], [ "Survivor", -13.507716178894043 ], [ "▁Beethoven", -13.507716178894043 ], [ "▁Exemplar", -13.507716178894043 ], [ "▁Gonzalez", -13.507716178894043 ], [ "▁Illustrator", -13.507716178894043 ], [ "▁Verpflichtung", -13.507718086242676 ], [ "Possibly", -13.507719993591309 ], [ "Maintenant", -13.507721900939941 ], [ "▁incendiu", -13.507721900939941 ], [ "▁poêl", -13.507747650146484 ], [ "▁aşez", -13.507757186889648 ], [ "phenol", -13.508248329162598 ], [ "▁magician", -13.508421897888184 ], [ "éventuellement", -13.508512496948242 ], [ "▁amortiz", -13.508736610412598 ], [ "bouchage", -13.50873851776123 ], [ "▁Accommodation", -13.509223937988281 ], [ "▁Significant", -13.509223937988281 ], [ "▁rejoice", -13.509223937988281 ], [ "▁Lorraine", -13.509224891662598 ], [ "▁Necklace", -13.509234428405762 ], [ "▁hamburger", -13.509273529052734 ], [ "Enhanced", -13.5095796585083 ], [ "▁Audrey", -13.509978294372559 ], [ "▁considère", -13.509986877441406 ], [ "hafen", -13.51050853729248 ], [ "acordare", -13.510509490966797 ], [ "▁ediți", -13.51075553894043 ], [ "▁militia", -13.510767936706543 ], [ "captivate", -13.510771751403809 ], [ "▁rebellion", -13.510777473449707 ], [ "▁veranstalte", -13.510844230651855 ], [ "▁matelas", -13.510859489440918 ], [ "originating", -13.510873794555664 ], [ "Typical", -13.51092529296875 ], [ "▁législat", -13.511360168457031 ], [ "▁Kräfte", -13.511488914489746 ], [ "▁Eigentümer", -13.511489868164062 ], [ "▁gonfl", -13.511608123779297 ], [ "dispoziție", -13.512028694152832 ], [ "▁Fabulous", -13.512246131896973 ], [ "▁Guillaume", -13.512246131896973 ], [ "▁Genuine", -13.512247085571289 ], [ "selbe", -13.512449264526367 ], [ "(2002)", -13.512616157531738 ], [ "Einen", -13.512908935546875 ], [ "▁Snapdragon", -13.513002395629883 ], [ "▁plagiarism", -13.513002395629883 ], [ "▁Rendez", -13.513019561767578 ], [ "▁înregistrare", -13.513033866882324 ], [ "probiert", -13.513081550598145 ], [ "gestiegen", -13.513153076171875 ], [ "Teatrul", -13.513370513916016 ], [ "trove", -13.513469696044922 ], [ "ntsprechend", -13.513566017150879 ], [ "Städten", -13.513691902160645 ], [ "unforeseen", -13.513760566711426 ], [ "▁Meridian", -13.513761520385742 ], [ "▁Ministries", -13.513763427734375 ], [ 
"plaît", -13.513769149780273 ], [ "▁Telefonnummer", -13.513772010803223 ], [ "welded", -13.513788223266602 ], [ "pondere", -13.513976097106934 ], [ "▁funcţiona", -13.514012336730957 ], [ "▁politicieni", -13.514187812805176 ], [ "fleck", -13.514240264892578 ], [ "▁Nitro", -13.514264106750488 ], [ "wettbewerb", -13.514518737792969 ], [ "▁ingrijire", -13.514518737792969 ], [ "▁Gehirn", -13.514521598815918 ], [ "sigură", -13.514904022216797 ], [ "400,000", -13.515237808227539 ], [ "▁cataract", -13.515277862548828 ], [ "outskirt", -13.515280723571777 ], [ "▁Identification", -13.515287399291992 ], [ "▁imperfections", -13.515317916870117 ], [ "▁Dokumentation", -13.515474319458008 ], [ "Engine", -13.515851974487305 ], [ "extindere", -13.516046524047852 ], [ "bijoux", -13.516797065734863 ], [ "▁dărui", -13.516802787780762 ], [ "▁Moderator", -13.516913414001465 ], [ "biblio", -13.517024040222168 ], [ "енн", -13.517024040222168 ], [ "▁Relevan", -13.51728630065918 ], [ "ansprüche", -13.517557144165039 ], [ "épaisseur", -13.517580032348633 ], [ "▁emoţi", -13.517677307128906 ], [ "exacerbate", -13.518318176269531 ], [ "▁Wimbledon", -13.518318176269531 ], [ "▁Pandora", -13.518319129943848 ], [ "perhaps", -13.518725395202637 ], [ "certify", -13.518762588500977 ], [ "Strukturen", -13.5189208984375 ], [ "▁Kreativität", -13.519079208374023 ], [ "schlägt", -13.51908016204834 ], [ "▁certifié", -13.51911735534668 ], [ "/09/", -13.519211769104004 ], [ "▁suprafaţ", -13.519493103027344 ], [ "verständnis", -13.519841194152832 ], [ "presedintele", -13.519842147827148 ], [ "▁orthopedic", -13.519842147827148 ], [ "▁superioara", -13.519843101501465 ], [ "älteste", -13.519903182983398 ], [ "▁conducător", -13.520153999328613 ], [ "supplementary", -13.520243644714355 ], [ "wetlands", -13.520438194274902 ], [ "▁suprafete", -13.520605087280273 ], [ "▁aparțin", -13.520951271057129 ], [ "analiză", -13.521014213562012 ], [ "Uneori", -13.52115535736084 ], [ "Toujours", -13.521368026733398 ], [ "▁Nairobi", -13.521368026733398 ], [ "▁asparagus", -13.521368026733398 ], [ "▁crowdfunding", -13.521368026733398 ], [ "gutachten", -13.521369934082031 ], [ "smelling", -13.521659851074219 ], [ "▁elektrisch", -13.521718978881836 ], [ "begging", -13.522055625915527 ], [ "▁Renewable", -13.522896766662598 ], [ "▁Trouble", -13.522896766662598 ], [ "▁devastated", -13.522896766662598 ], [ "▁remplacé", -13.522896766662598 ], [ "▁schmeckt", -13.522896766662598 ], [ "▁exerciți", -13.523005485534668 ], [ "▁vermute", -13.523650169372559 ], [ "▁Constanța", -13.523661613464355 ], [ "expunere", -13.523693084716797 ], [ "▁Fitzgerald", -13.52442741394043 ], [ "▁Mechanism", -13.524429321289062 ], [ "▁underscore", -13.524484634399414 ], [ "poziţie", -13.524901390075684 ], [ "stöbern", -13.525193214416504 ], [ "▁littérature", -13.525193214416504 ], [ "▁împrumut", -13.525193214416504 ], [ "Vision", -13.525771141052246 ], [ "▁overwhelm", -13.525773048400879 ], [ "▁erweitern", -13.525959968566895 ], [ "skeletal", -13.525960922241211 ], [ "▁terrified", -13.525960922241211 ], [ "aggravate", -13.525962829589844 ], [ "▁Malawi", -13.525969505310059 ], [ "▁neuroscience", -13.526009559631348 ], [ "trecută", -13.526097297668457 ], [ "▁maestr", -13.52634334564209 ], [ "нов", -13.526555061340332 ], [ "▁Cobb", -13.52667236328125 ], [ "▁Schwangerschaft", -13.526727676391602 ], [ "▁internationaux", -13.526727676391602 ], [ "▁entspannen", -13.526729583740234 ], [ "▁Früchte", -13.52676773071289 ], [ "mâine", -13.526805877685547 ], [ "stützt", -13.526938438415527 ], [ 
"flipped", -13.527076721191406 ], [ "Palatul", -13.527252197265625 ], [ "▁Gérard", -13.527496337890625 ], [ "▁Kensington", -13.527498245239258 ], [ "chargée", -13.52807331085205 ], [ "iolo", -13.528203964233398 ], [ "▁excesiv", -13.52904987335205 ], [ "▁Gymnas", -13.52962875366211 ], [ "▁optimise", -13.529678344726562 ], [ "possibilités", -13.529717445373535 ], [ "▁periculoas", -13.529810905456543 ], [ "mechanical", -13.529839515686035 ], [ "▁confruntă", -13.529868125915527 ], [ "quatrième", -13.530573844909668 ], [ "▁Preservation", -13.530573844909668 ], [ "▁Juventus", -13.530574798583984 ], [ "vorsitzende", -13.5305757522583 ], [ "électora", -13.530586242675781 ], [ "▁fascinant", -13.53061580657959 ], [ "▁lagoon", -13.530671119689941 ], [ "referencing", -13.53079605102539 ], [ "appointed", -13.530988693237305 ], [ "Audible", -13.531112670898438 ], [ "sighted", -13.531612396240234 ], [ "▁gewünscht", -13.532061576843262 ], [ "▁Expedition", -13.532115936279297 ], [ "▁genunchi", -13.532115936279297 ], [ "▁PROVIDE", -13.53211784362793 ], [ "▁rosemary", -13.532118797302246 ], [ "▁cleanliness", -13.532130241394043 ], [ "commanded", -13.53223991394043 ], [ "ältere", -13.532530784606934 ], [ "ност", -13.532547950744629 ], [ "kühlen", -13.532917976379395 ], [ "mettez", -13.533548355102539 ], [ "connaitre", -13.533661842346191 ], [ "Qaeda", -13.533662796020508 ], [ "▁traumhaft", -13.53366470336914 ], [ "kommst", -13.533666610717773 ], [ "▁Abbott", -13.533669471740723 ], [ "▁Fool", -13.533686637878418 ], [ "▁médaill", -13.533687591552734 ], [ "▁genotyp", -13.533693313598633 ], [ "▁Fälle", -13.53375244140625 ], [ "▁actuator", -13.533843994140625 ], [ "CLASS", -13.534042358398438 ], [ "progressively", -13.534421920776367 ], [ "negative", -13.53469467163086 ], [ "bundled", -13.535009384155273 ], [ "▁dezbatere", -13.535208702087402 ], [ "kamagra", -13.535237312316895 ], [ "gardinen", -13.535250663757324 ], [ "unsecured", -13.535271644592285 ], [ "Assisted", -13.535298347473145 ], [ "Gymnasium", -13.535386085510254 ], [ "▁brusc", -13.535591125488281 ], [ "prinzip", -13.535655975341797 ], [ "Torrent", -13.535964965820312 ], [ "Presented", -13.535967826843262 ], [ "▁impressionnant", -13.53628921508789 ], [ "charakter", -13.536758422851562 ], [ "▁Acoustic", -13.536762237548828 ], [ "▁appartient", -13.536763191223145 ], [ "gesteuert", -13.536879539489746 ], [ "▁condiți", -13.537089347839355 ], [ "authentic", -13.537313461303711 ], [ "▁Erholung", -13.537534713745117 ], [ "▁Veranstalter", -13.537534713745117 ], [ "▁Filial", -13.537665367126465 ], [ "ruhigen", -13.537714958190918 ], [ "symptôme", -13.538311004638672 ], [ "▁Efficiency", -13.538311004638672 ], [ "▁stunned", -13.538311004638672 ], [ "▁sympathique", -13.538311004638672 ], [ "Uploaded", -13.538352966308594 ], [ "▁geistig", -13.538453102111816 ], [ "Pläne", -13.538509368896484 ], [ "▁Apartament", -13.53855037689209 ], [ "▁ușoar", -13.539119720458984 ], [ "▁locuinț", -13.539122581481934 ], [ "épouse", -13.539166450500488 ], [ "îngrijire", -13.539215087890625 ], [ "Obtain", -13.539261817932129 ], [ "Detect", -13.539590835571289 ], [ "▁Dumitru", -13.539865493774414 ], [ "▁refrigeration", -13.539865493774414 ], [ "ärztliche", -13.539881706237793 ], [ "efficiency", -13.540032386779785 ], [ "▁snail", -13.540328979492188 ], [ "gelände", -13.540419578552246 ], [ "expected", -13.540620803833008 ], [ "kompetenz", -13.540643692016602 ], [ "▁sfânt", -13.540643692016602 ], [ "océan", -13.540685653686523 ], [ "▁Plasma", -13.540717124938965 ], [ "▁vulgar", 
-13.54075813293457 ], [ "▁slump", -13.541083335876465 ], [ "autoimmune", -13.541422843933105 ], [ "▁Cynthia", -13.541422843933105 ], [ "▁dimineaţ", -13.541422843933105 ], [ "▁whimsical", -13.541422843933105 ], [ "▁evaporate", -13.541488647460938 ], [ "▁calorii", -13.54186725616455 ], [ "portion", -13.54187297821045 ], [ "crowned", -13.5419282913208 ], [ "▁întâmpin", -13.54220199584961 ], [ "▁Centenar", -13.542620658874512 ], [ "▁Genehmigung", -13.54298210144043 ], [ "▁Wahrscheinlich", -13.54298210144043 ], [ "▁accompaniment", -13.54298210144043 ], [ "▁Negoti", -13.542984962463379 ], [ "▁Vanilla", -13.543000221252441 ], [ "▁Receiv", -13.543014526367188 ], [ "▁bestseller", -13.543052673339844 ], [ "tendons", -13.543069839477539 ], [ "Reilly", -13.543192863464355 ], [ "▁refroidi", -13.543731689453125 ], [ "▁überrascht", -13.543763160705566 ], [ "Gitarre", -13.543828964233398 ], [ "wände", -13.544173240661621 ], [ "veniturile", -13.544321060180664 ], [ "▁portofoliu", -13.54454517364502 ], [ "▁temporaire", -13.54454517364502 ], [ "▁Dawson", -13.544546127319336 ], [ "foreseeable", -13.544547080993652 ], [ "▁Gastgeber", -13.545344352722168 ], [ "Access", -13.545432090759277 ], [ "▁Defender", -13.545537948608398 ], [ "▁Quarry", -13.546109199523926 ], [ "▁trolley", -13.546110153198242 ], [ "▁carburant", -13.546111106872559 ], [ "▁titluri", -13.54631233215332 ], [ "comparatively", -13.546327590942383 ], [ "nachfolgend", -13.54659652709961 ], [ "anfang", -13.546740531921387 ], [ "▁faszinieren", -13.546891212463379 ], [ "trăiesc", -13.547082901000977 ], [ "▁Travail", -13.547159194946289 ], [ "Contact", -13.547235488891602 ], [ "fashion", -13.547245025634766 ], [ "▁épais", -13.547585487365723 ], [ "plattform", -13.547676086425781 ], [ "ventricular", -13.547677040100098 ], [ "▁Portsmouth", -13.547677993774414 ], [ "▁împărat", -13.54767894744873 ], [ "▁vândut", -13.547698020935059 ], [ "▁evidenț", -13.547708511352539 ], [ "Purchasing", -13.547877311706543 ], [ "discerning", -13.54804801940918 ], [ "odonti", -13.548080444335938 ], [ "distilled", -13.548316955566406 ], [ "saveur", -13.548447608947754 ], [ "▁récompense", -13.54845905303955 ], [ "confortul", -13.548552513122559 ], [ "arbeitete", -13.548787117004395 ], [ "partenerii", -13.549064636230469 ], [ "mirrored", -13.54908561706543 ], [ "Dienstleister", -13.549243927001953 ], [ "▁Jakarta", -13.549243927001953 ], [ "▁WEBSITE", -13.549243927001953 ], [ "▁Acquisition", -13.549262046813965 ], [ "▁Miranda", -13.549287796020508 ], [ "Syndic", -13.549356460571289 ], [ "▁stadiu", -13.549450874328613 ], [ "▁Parchet", -13.549498558044434 ], [ "Générale", -13.54954719543457 ], [ "▁jpl", -13.549579620361328 ], [ "attainable", -13.549949645996094 ], [ "École", -13.550041198730469 ], [ "Sphere", -13.550538063049316 ], [ "obtainable", -13.550592422485352 ], [ "▁Sapphire", -13.55081558227539 ], [ "▁aérienne", -13.55081558227539 ], [ "▁bărbați", -13.55081558227539 ], [ "▁irritating", -13.55081558227539 ], [ "▁ultraviolet", -13.550816535949707 ], [ "untouched", -13.550817489624023 ], [ "▁Ramsey", -13.550819396972656 ], [ "titres", -13.551087379455566 ], [ "▁Coordinat", -13.551218032836914 ], [ "believable", -13.551358222961426 ], [ "▁Grundsätzlich", -13.551602363586426 ], [ "▁konsequent", -13.551602363586426 ], [ "▁Cerceta", -13.551909446716309 ], [ "dirigé", -13.552116394042969 ], [ "▁disturb", -13.552151679992676 ], [ "conciliation", -13.552210807800293 ], [ "▁gelöscht", -13.552390098571777 ], [ "▁sauvegarde", -13.552391052246094 ], [ "▁cavities", -13.552393913269043 
], [ "stunde", -13.55241584777832 ], [ "▁foloseasc", -13.552430152893066 ], [ "▁simpati", -13.552873611450195 ], [ "Chacun", -13.553032875061035 ], [ "adversaire", -13.553178787231445 ], [ "Eigentlich", -13.55319881439209 ], [ "defense", -13.553593635559082 ], [ "consider", -13.553672790527344 ], [ "▁Trinidad", -13.553966522216797 ], [ "▁strategist", -13.553966522216797 ], [ "distorted", -13.553967475891113 ], [ "▁hypothetical", -13.553967475891113 ], [ "▁ramburs", -13.55396842956543 ], [ "▁Mallorca", -13.553970336914062 ], [ "▁Domino", -13.554018020629883 ], [ "arrondissement", -13.554756164550781 ], [ "konferenz", -13.554756164550781 ], [ "▁Beleuchtung", -13.554756164550781 ], [ "aggregat", -13.55484676361084 ], [ "subsidize", -13.554896354675293 ], [ "shri", -13.555503845214844 ], [ "Kaufentscheidung", -13.555545806884766 ], [ "▁Hernandez", -13.555545806884766 ], [ "▁Upholster", -13.555546760559082 ], [ "atlantic", -13.555614471435547 ], [ "▁locuinte", -13.555652618408203 ], [ "integrates", -13.55583381652832 ], [ "ewusst", -13.555878639221191 ], [ "▁Avocado", -13.556337356567383 ], [ "Decorative", -13.557014465332031 ], [ "▁Corinthians", -13.557127952575684 ], [ "▁clădire", -13.557127952575684 ], [ "▁plomberie", -13.557127952575684 ], [ "vases", -13.557143211364746 ], [ "▁crippl", -13.557247161865234 ], [ "cluttered", -13.557487487792969 ], [ "departed", -13.557807922363281 ], [ "▁entscheidet", -13.5579195022583 ], [ "Certaine", -13.558243751525879 ], [ "honda", -13.558294296264648 ], [ "triggering", -13.558527946472168 ], [ "▁Erdogan", -13.558712005615234 ], [ "▁Widerstand", -13.558712005615234 ], [ "▁Bhutan", -13.558713912963867 ], [ "▁ascunde", -13.558736801147461 ], [ "▁shading", -13.558748245239258 ], [ "behavioural", -13.559172630310059 ], [ "▁transfér", -13.55960750579834 ], [ "versichert", -13.559623718261719 ], [ "▁vinovat", -13.559646606445312 ], [ "▁airfare", -13.560142517089844 ], [ "▁simplistic", -13.56030559539795 ], [ "▁Asigura", -13.560320854187012 ], [ "Chauffe", -13.560480117797852 ], [ "scrisă", -13.560585975646973 ], [ "trouvez", -13.560702323913574 ], [ "greasy", -13.560709953308105 ], [ "bottled", -13.560809135437012 ], [ "grouped", -13.560934066772461 ], [ "▁beeinflussen", -13.561092376708984 ], [ "▁chronological", -13.561114311218262 ], [ "(2000)", -13.56127643585205 ], [ "sheltered", -13.561298370361328 ], [ "Historically", -13.561931610107422 ], [ "piled", -13.562012672424316 ], [ "publicate", -13.562378883361816 ], [ "▁étudié", -13.56268310546875 ], [ "▁vertraut", -13.562688827514648 ], [ "▁Anpassung", -13.562697410583496 ], [ "cifra", -13.562705993652344 ], [ "▁recueil", -13.562762260437012 ], [ "enforceable", -13.563183784484863 ], [ "Distinguished", -13.56347942352295 ], [ "Empfänger", -13.56347942352295 ], [ "▁Acrylic", -13.56347942352295 ], [ "▁Encyclopedia", -13.56347942352295 ], [ "▁proaspete", -13.56347942352295 ], [ "▁unrealistic", -13.56347942352295 ], [ "▁Assignment", -13.563481330871582 ], [ "▁incubator", -13.563491821289062 ], [ "▁unilateral", -13.563501358032227 ], [ "elasticity", -13.564398765563965 ], [ "amintim", -13.564475059509277 ], [ "fournit", -13.564553260803223 ], [ "semblent", -13.564763069152832 ], [ "▁$69.", -13.56496524810791 ], [ "▁prominence", -13.56507396697998 ], [ "Übertragung", -13.565075874328613 ], [ "▁2014-11-", -13.565075874328613 ], [ "▁Giurgiu", -13.565104484558105 ], [ "étendue", -13.565123558044434 ], [ "ceputul", -13.565187454223633 ], [ "Schwierigkeiten", -13.565872192382812 ], [ "▁subtract", -13.565881729125977 ], 
[ "▁gesichert", -13.56589126586914 ], [ "▁uimit", -13.565925598144531 ], [ "▁mensuel", -13.565967559814453 ], [ "Vorgaben", -13.566215515136719 ], [ "▁legitimacy", -13.566670417785645 ], [ "▁Kendall", -13.566673278808594 ], [ "▁détach", -13.566790580749512 ], [ "▁kennenlernen", -13.567469596862793 ], [ "▁gewöhnlich", -13.56747055053711 ], [ "Octav", -13.567917823791504 ], [ "responsive", -13.568169593811035 ], [ "▁Mängel", -13.568269729614258 ], [ "▁mișcare", -13.568269729614258 ], [ "▁ludique", -13.568270683288574 ], [ "▁Exeter", -13.568324089050293 ], [ "▁respins", -13.569114685058594 ], [ "oraşului", -13.569173812866211 ], [ "▁sfârşit", -13.56949520111084 ], [ "BUSINESS", -13.56987190246582 ], [ "illustrating", -13.56987190246582 ], [ "▁Tottenham", -13.56987190246582 ], [ "▁pruning", -13.569886207580566 ], [ "▁Înainte", -13.569904327392578 ], [ "▁interesel", -13.570096969604492 ], [ "discovered", -13.57031536102295 ], [ "(0)", -13.570572853088379 ], [ "▁Bewerber", -13.570673942565918 ], [ "▁DESIGN", -13.570673942565918 ], [ "▁Orientierung", -13.570686340332031 ], [ "library", -13.571041107177734 ], [ "cheltuielile", -13.571419715881348 ], [ "▁Canterbury", -13.571475982666016 ], [ "▁intellectuelle", -13.571477890014648 ], [ "▁amalgam", -13.571497917175293 ], [ "▁Toledo", -13.57150650024414 ], [ "gezahlt", -13.571531295776367 ], [ "Veronica", -13.571659088134766 ], [ "deleting", -13.571946144104004 ], [ "▁Merlin", -13.572442054748535 ], [ "▁opérationnel", -13.572554588317871 ], [ "schmutz", -13.572568893432617 ], [ "hyroid", -13.57279109954834 ], [ "▁Compatible", -13.57308292388916 ], [ "▁Leopard", -13.57308292388916 ], [ "▁cylindrical", -13.57308292388916 ], [ "▁terrestrial", -13.57308292388916 ], [ "conferencing", -13.573088645935059 ], [ "▁Variety", -13.573097229003906 ], [ "▁Screw", -13.573164939880371 ], [ "character", -13.573637962341309 ], [ "shortened", -13.573643684387207 ], [ "▁întrerup", -13.573736190795898 ], [ "freude", -13.573884010314941 ], [ "▁dezbateri", -13.573887825012207 ], [ "viteză", -13.574563026428223 ], [ "formațiile", -13.574600219726562 ], [ "▁responsibly", -13.574692726135254 ], [ "Dimensiuni", -13.574695587158203 ], [ "Arrangement", -13.57469654083252 ], [ "▁Leisure", -13.574712753295898 ], [ "escaping", -13.5750732421875 ], [ "flexion", -13.575104713439941 ], [ "▁religieuse", -13.575308799743652 ], [ "crystalline", -13.575457572937012 ], [ "▁clasp", -13.575520515441895 ], [ "festigt", -13.57554817199707 ], [ "▁trouvai", -13.57596206665039 ], [ "cutaneous", -13.576305389404297 ], [ "▁carcinoma", -13.576305389404297 ], [ "▁juxtapos", -13.576305389404297 ], [ "assemblage", -13.576306343078613 ], [ "▁Messiah", -13.576306343078613 ], [ "▁Sleeve", -13.576306343078613 ], [ "▁șofer", -13.576386451721191 ], [ "/05/", -13.57666301727295 ], [ "▁expoziți", -13.576703071594238 ], [ "▁pătrun", -13.577343940734863 ], [ "▁Lydia", -13.57739543914795 ], [ "▁grădini", -13.577919006347656 ], [ "▁toothpaste", -13.577919960021973 ], [ "ordained", -13.577921867370605 ], [ "▁Renovation", -13.577922821044922 ], [ "voicing", -13.578327178955078 ], [ "président", -13.578595161437988 ], [ "▁gestartet", -13.578728675842285 ], [ "Multi", -13.579121589660645 ], [ "itinéraire", -13.579537391662598 ], [ "▁influenza", -13.579537391662598 ], [ "▁psychiatrist", -13.579537391662598 ], [ "▁schizophrenia", -13.579537391662598 ], [ "▁Magnolia", -13.57953929901123 ], [ "▁Scottsdale", -13.579541206359863 ], [ "▁interessieren", -13.579548835754395 ], [ "▁asfalt", -13.579643249511719 ], [ 
"▁Journalism", -13.57977294921875 ], [ "Multe", -13.580089569091797 ], [ "Westfalen", -13.580347061157227 ], [ "▁Vorschriften", -13.580348014831543 ], [ "Angleterre", -13.58034896850586 ], [ "sustainable", -13.580354690551758 ], [ "▁Retour", -13.580589294433594 ], [ "▁pâr", -13.5809965133667 ], [ "steigert", -13.581120491027832 ], [ "▁AMAZING", -13.581157684326172 ], [ "▁turbulent", -13.581157684326172 ], [ "costing", -13.58155345916748 ], [ "▁Carolyn", -13.581634521484375 ], [ "utti", -13.581802368164062 ], [ "dürftig", -13.581968307495117 ], [ "Keep", -13.582038879394531 ], [ "▁Théâtre", -13.582780838012695 ], [ "▁combustibil", -13.582780838012695 ], [ "▁halloween", -13.582780838012695 ], [ "▁emulator", -13.582785606384277 ], [ "▁povești", -13.582785606384277 ], [ "broyeur", -13.582810401916504 ], [ "▁émerg", -13.582927703857422 ], [ "overwhelmingly", -13.583025932312012 ], [ "regulă", -13.583124160766602 ], [ "goutte", -13.583125114440918 ], [ "▁Fertigung", -13.583593368530273 ], [ "constituted", -13.584304809570312 ], [ "▁QuickBooks", -13.584406852722168 ], [ "▁genealogy", -13.584407806396484 ], [ "▁laundering", -13.584432601928711 ], [ "▁échéan", -13.584491729736328 ], [ "Account", -13.584601402282715 ], [ "oyons", -13.584792137145996 ], [ "nitro", -13.584905624389648 ], [ "▁corespund", -13.585219383239746 ], [ "▁suggér", -13.58527660369873 ], [ "manipulated", -13.585348129272461 ], [ "deseori", -13.585817337036133 ], [ "permeabil", -13.585912704467773 ], [ "Australia", -13.58594799041748 ], [ "▁Erasmus", -13.586034774780273 ], [ "▁disrespect", -13.586034774780273 ], [ "▁trimestre", -13.586038589477539 ], [ "▁emanat", -13.586103439331055 ], [ "Schraub", -13.58624267578125 ], [ "distinctly", -13.586319923400879 ], [ "Germain", -13.586637496948242 ], [ "▁pedepse", -13.5868501663208 ], [ "réglage", -13.5868558883667 ], [ "făcute", -13.587308883666992 ], [ "▁garanteaz", -13.587434768676758 ], [ "▁unterlieg", -13.587701797485352 ], [ "▁cheddar", -13.587712287902832 ], [ "▁refugi", -13.587756156921387 ], [ "▁inférieur", -13.587836265563965 ], [ "dimension", -13.588440895080566 ], [ "▁erkennt", -13.588570594787598 ], [ "amitié", -13.588632583618164 ], [ "▁predominant", -13.588680267333984 ], [ "nourishe", -13.588800430297852 ], [ "exerce", -13.588907241821289 ], [ "▁disguise", -13.589225769042969 ], [ "▁traditi", -13.589289665222168 ], [ "▁Intellectual", -13.5892972946167 ], [ "▁imunitar", -13.589299201965332 ], [ "▁Cushion", -13.589300155639648 ], [ "▁erwachsene", -13.589517593383789 ], [ "▁Internațional", -13.590115547180176 ], [ "<extra_id_99>", 0.0 ], [ "<extra_id_98>", 0.0 ], [ "<extra_id_97>", 0.0 ], [ "<extra_id_96>", 0.0 ], [ "<extra_id_95>", 0.0 ], [ "<extra_id_94>", 0.0 ], [ "<extra_id_93>", 0.0 ], [ "<extra_id_92>", 0.0 ], [ "<extra_id_91>", 0.0 ], [ "<extra_id_90>", 0.0 ], [ "<extra_id_89>", 0.0 ], [ "<extra_id_88>", 0.0 ], [ "<extra_id_87>", 0.0 ], [ "<extra_id_86>", 0.0 ], [ "<extra_id_85>", 0.0 ], [ "<extra_id_84>", 0.0 ], [ "<extra_id_83>", 0.0 ], [ "<extra_id_82>", 0.0 ], [ "<extra_id_81>", 0.0 ], [ "<extra_id_80>", 0.0 ], [ "<extra_id_79>", 0.0 ], [ "<extra_id_78>", 0.0 ], [ "<extra_id_77>", 0.0 ], [ "<extra_id_76>", 0.0 ], [ "<extra_id_75>", 0.0 ], [ "<extra_id_74>", 0.0 ], [ "<extra_id_73>", 0.0 ], [ "<extra_id_72>", 0.0 ], [ "<extra_id_71>", 0.0 ], [ "<extra_id_70>", 0.0 ], [ "<extra_id_69>", 0.0 ], [ "<extra_id_68>", 0.0 ], [ "<extra_id_67>", 0.0 ], [ "<extra_id_66>", 0.0 ], [ "<extra_id_65>", 0.0 ], [ "<extra_id_64>", 0.0 ], [ "<extra_id_63>", 0.0 ], [ "<extra_id_62>", 
0.0 ], [ "<extra_id_61>", 0.0 ], [ "<extra_id_60>", 0.0 ], [ "<extra_id_59>", 0.0 ], [ "<extra_id_58>", 0.0 ], [ "<extra_id_57>", 0.0 ], [ "<extra_id_56>", 0.0 ], [ "<extra_id_55>", 0.0 ], [ "<extra_id_54>", 0.0 ], [ "<extra_id_53>", 0.0 ], [ "<extra_id_52>", 0.0 ], [ "<extra_id_51>", 0.0 ], [ "<extra_id_50>", 0.0 ], [ "<extra_id_49>", 0.0 ], [ "<extra_id_48>", 0.0 ], [ "<extra_id_47>", 0.0 ], [ "<extra_id_46>", 0.0 ], [ "<extra_id_45>", 0.0 ], [ "<extra_id_44>", 0.0 ], [ "<extra_id_43>", 0.0 ], [ "<extra_id_42>", 0.0 ], [ "<extra_id_41>", 0.0 ], [ "<extra_id_40>", 0.0 ], [ "<extra_id_39>", 0.0 ], [ "<extra_id_38>", 0.0 ], [ "<extra_id_37>", 0.0 ], [ "<extra_id_36>", 0.0 ], [ "<extra_id_35>", 0.0 ], [ "<extra_id_34>", 0.0 ], [ "<extra_id_33>", 0.0 ], [ "<extra_id_32>", 0.0 ], [ "<extra_id_31>", 0.0 ], [ "<extra_id_30>", 0.0 ], [ "<extra_id_29>", 0.0 ], [ "<extra_id_28>", 0.0 ], [ "<extra_id_27>", 0.0 ], [ "<extra_id_26>", 0.0 ], [ "<extra_id_25>", 0.0 ], [ "<extra_id_24>", 0.0 ], [ "<extra_id_23>", 0.0 ], [ "<extra_id_22>", 0.0 ], [ "<extra_id_21>", 0.0 ], [ "<extra_id_20>", 0.0 ], [ "<extra_id_19>", 0.0 ], [ "<extra_id_18>", 0.0 ], [ "<extra_id_17>", 0.0 ], [ "<extra_id_16>", 0.0 ], [ "<extra_id_15>", 0.0 ], [ "<extra_id_14>", 0.0 ], [ "<extra_id_13>", 0.0 ], [ "<extra_id_12>", 0.0 ], [ "<extra_id_11>", 0.0 ], [ "<extra_id_10>", 0.0 ], [ "<extra_id_9>", 0.0 ], [ "<extra_id_8>", 0.0 ], [ "<extra_id_7>", 0.0 ], [ "<extra_id_6>", 0.0 ], [ "<extra_id_5>", 0.0 ], [ "<extra_id_4>", 0.0 ], [ "<extra_id_3>", 0.0 ], [ "<extra_id_2>", 0.0 ], [ "<extra_id_1>", 0.0 ], [ "<extra_id_0>", 0.0 ] ], "byte_fallback": false } }
{ "added_tokens_decoder": { "0": { "content": "<pad>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "1": { "content": "</s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "2": { "content": "<unk>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32000": { "content": "<extra_id_99>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32001": { "content": "<extra_id_98>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32002": { "content": "<extra_id_97>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32003": { "content": "<extra_id_96>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32004": { "content": "<extra_id_95>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32005": { "content": "<extra_id_94>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32006": { "content": "<extra_id_93>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32007": { "content": "<extra_id_92>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32008": { "content": "<extra_id_91>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32009": { "content": "<extra_id_90>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32010": { "content": "<extra_id_89>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32011": { "content": "<extra_id_88>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32012": { "content": "<extra_id_87>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32013": { "content": "<extra_id_86>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32014": { "content": "<extra_id_85>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32015": { "content": "<extra_id_84>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32016": { "content": "<extra_id_83>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32017": { "content": "<extra_id_82>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32018": { "content": "<extra_id_81>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32019": { "content": "<extra_id_80>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32020": { "content": "<extra_id_79>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32021": { "content": "<extra_id_78>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32022": { "content": "<extra_id_77>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32023": { "content": "<extra_id_76>", "lstrip": 
false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32024": { "content": "<extra_id_75>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32025": { "content": "<extra_id_74>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32026": { "content": "<extra_id_73>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32027": { "content": "<extra_id_72>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32028": { "content": "<extra_id_71>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32029": { "content": "<extra_id_70>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32030": { "content": "<extra_id_69>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32031": { "content": "<extra_id_68>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32032": { "content": "<extra_id_67>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32033": { "content": "<extra_id_66>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32034": { "content": "<extra_id_65>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32035": { "content": "<extra_id_64>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32036": { "content": "<extra_id_63>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32037": { "content": "<extra_id_62>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32038": { "content": "<extra_id_61>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32039": { "content": "<extra_id_60>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32040": { "content": "<extra_id_59>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32041": { "content": "<extra_id_58>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32042": { "content": "<extra_id_57>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32043": { "content": "<extra_id_56>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32044": { "content": "<extra_id_55>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32045": { "content": "<extra_id_54>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32046": { "content": "<extra_id_53>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32047": { "content": "<extra_id_52>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32048": { "content": "<extra_id_51>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32049": { "content": "<extra_id_50>", "lstrip": false, "normalized": false, "rstrip": false, 
"single_word": false, "special": true }, "32050": { "content": "<extra_id_49>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32051": { "content": "<extra_id_48>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32052": { "content": "<extra_id_47>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32053": { "content": "<extra_id_46>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32054": { "content": "<extra_id_45>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32055": { "content": "<extra_id_44>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32056": { "content": "<extra_id_43>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32057": { "content": "<extra_id_42>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32058": { "content": "<extra_id_41>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32059": { "content": "<extra_id_40>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32060": { "content": "<extra_id_39>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32061": { "content": "<extra_id_38>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32062": { "content": "<extra_id_37>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32063": { "content": "<extra_id_36>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32064": { "content": "<extra_id_35>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32065": { "content": "<extra_id_34>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32066": { "content": "<extra_id_33>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32067": { "content": "<extra_id_32>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32068": { "content": "<extra_id_31>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32069": { "content": "<extra_id_30>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32070": { "content": "<extra_id_29>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32071": { "content": "<extra_id_28>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32072": { "content": "<extra_id_27>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32073": { "content": "<extra_id_26>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32074": { "content": "<extra_id_25>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32075": { "content": "<extra_id_24>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, 
"32076": { "content": "<extra_id_23>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32077": { "content": "<extra_id_22>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32078": { "content": "<extra_id_21>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32079": { "content": "<extra_id_20>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32080": { "content": "<extra_id_19>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32081": { "content": "<extra_id_18>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32082": { "content": "<extra_id_17>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32083": { "content": "<extra_id_16>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32084": { "content": "<extra_id_15>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32085": { "content": "<extra_id_14>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32086": { "content": "<extra_id_13>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32087": { "content": "<extra_id_12>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32088": { "content": "<extra_id_11>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32089": { "content": "<extra_id_10>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32090": { "content": "<extra_id_9>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32091": { "content": "<extra_id_8>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32092": { "content": "<extra_id_7>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32093": { "content": "<extra_id_6>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32094": { "content": "<extra_id_5>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32095": { "content": "<extra_id_4>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32096": { "content": "<extra_id_3>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32097": { "content": "<extra_id_2>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32098": { "content": "<extra_id_1>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32099": { "content": "<extra_id_0>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true } }, "additional_special_tokens": [ "<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", 
"<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>" ], "clean_up_tokenization_spaces": true, "eos_token": "</s>", "extra_ids": 100, "legacy": false, "model_max_length": 512, "pad_token": "<pad>", "sp_model_kwargs": {}, "tokenizer_class": "T5Tokenizer", "unk_token": "<unk>" }
""" Tiny AutoEncoder for Stable Diffusion (DNN for encoding / decoding SD's latent space) """ import torch import torch.nn as nn import comfy.utils import comfy.ops def conv(n_in, n_out, **kwargs): return comfy.ops.disable_weight_init.Conv2d(n_in, n_out, 3, padding=1, **kwargs) class Clamp(nn.Module): def forward(self, x): return torch.tanh(x / 3) * 3 class Block(nn.Module): def __init__(self, n_in, n_out): super().__init__() self.conv = nn.Sequential(conv(n_in, n_out), nn.ReLU(), conv(n_out, n_out), nn.ReLU(), conv(n_out, n_out)) self.skip = comfy.ops.disable_weight_init.Conv2d(n_in, n_out, 1, bias=False) if n_in != n_out else nn.Identity() self.fuse = nn.ReLU() def forward(self, x): return self.fuse(self.conv(x) + self.skip(x)) def Encoder(latent_channels=4): return nn.Sequential( conv(3, 64), Block(64, 64), conv(64, 64, stride=2, bias=False), Block(64, 64), Block(64, 64), Block(64, 64), conv(64, 64, stride=2, bias=False), Block(64, 64), Block(64, 64), Block(64, 64), conv(64, 64, stride=2, bias=False), Block(64, 64), Block(64, 64), Block(64, 64), conv(64, latent_channels), ) def Decoder(latent_channels=4): return nn.Sequential( Clamp(), conv(latent_channels, 64), nn.ReLU(), Block(64, 64), Block(64, 64), Block(64, 64), nn.Upsample(scale_factor=2), conv(64, 64, bias=False), Block(64, 64), Block(64, 64), Block(64, 64), nn.Upsample(scale_factor=2), conv(64, 64, bias=False), Block(64, 64), Block(64, 64), Block(64, 64), nn.Upsample(scale_factor=2), conv(64, 64, bias=False), Block(64, 64), conv(64, 3), ) class TAESD(nn.Module): latent_magnitude = 3 latent_shift = 0.5 def __init__(self, encoder_path=None, decoder_path=None, latent_channels=4): """Initialize pretrained TAESD on the given device from the given checkpoints.""" super().__init__() self.taesd_encoder = Encoder(latent_channels=latent_channels) self.taesd_decoder = Decoder(latent_channels=latent_channels) self.vae_scale = torch.nn.Parameter(torch.tensor(1.0)) self.vae_shift = torch.nn.Parameter(torch.tensor(0.0)) if encoder_path is not None: self.taesd_encoder.load_state_dict(comfy.utils.load_torch_file(encoder_path, safe_load=True)) if decoder_path is not None: self.taesd_decoder.load_state_dict(comfy.utils.load_torch_file(decoder_path, safe_load=True)) @staticmethod def scale_latents(x): """raw latents -> [0, 1]""" return x.div(2 * TAESD.latent_magnitude).add(TAESD.latent_shift).clamp(0, 1) @staticmethod def unscale_latents(x): """[0, 1] -> raw latents""" return x.sub(TAESD.latent_shift).mul(2 * TAESD.latent_magnitude) def decode(self, x): x_sample = self.taesd_decoder((x - self.vae_shift) * self.vae_scale) x_sample = x_sample.sub(0.5).mul(2) return x_sample def encode(self, x): return (self.taesd_encoder(x * 0.5 + 0.5) / self.vae_scale) + self.vae_shift
import comfy.samplers
import comfy.utils
import torch
import numpy as np
from tqdm.auto import trange, tqdm
import math


@torch.no_grad()
def sample_lcm_upscale(model, x, sigmas, extra_args=None, callback=None, disable=None, total_upscale=2.0, upscale_method="bislerp", upscale_steps=None):
    extra_args = {} if extra_args is None else extra_args

    if upscale_steps is None:
        # when not specified, spread the upscaling over roughly the first half of the steps (at least 2 points)
        upscale_steps = max(len(sigmas) // 2 + 1, 2)
    else:
        upscale_steps += 1
        upscale_steps = min(upscale_steps, len(sigmas) + 1)

    upscales = np.linspace(1.0, total_upscale, upscale_steps)[1:]

    orig_shape = x.size()
    s_in = x.new_ones([x.shape[0]])
    for i in trange(len(sigmas) - 1, disable=disable):
        denoised = model(x, sigmas[i] * s_in, **extra_args)
        if callback is not None:
            callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised})
        x = denoised
        if i < len(upscales):
            x = comfy.utils.common_upscale(x, round(orig_shape[-1] * upscales[i]), round(orig_shape[-2] * upscales[i]), upscale_method, "disabled")
        if sigmas[i + 1] > 0:
            x += sigmas[i + 1] * torch.randn_like(x)
    return x


class SamplerLCMUpscale:
    upscale_methods = ["bislerp", "nearest-exact", "bilinear", "area", "bicubic"]

    @classmethod
    def INPUT_TYPES(s):
        return {"required":
                    {"scale_ratio": ("FLOAT", {"default": 1.0, "min": 0.1, "max": 20.0, "step": 0.01}),
                     "scale_steps": ("INT", {"default": -1, "min": -1, "max": 1000, "step": 1}),
                     "upscale_method": (s.upscale_methods,),
                     }
                }
    RETURN_TYPES = ("SAMPLER",)
    CATEGORY = "sampling/custom_sampling/samplers"

    FUNCTION = "get_sampler"

    def get_sampler(self, scale_ratio, scale_steps, upscale_method):
        if scale_steps < 0:
            scale_steps = None
        sampler = comfy.samplers.KSAMPLER(sample_lcm_upscale, extra_options={"total_upscale": scale_ratio, "upscale_steps": scale_steps, "upscale_method": upscale_method})
        return (sampler, )


NODE_CLASS_MAPPINGS = {
    "SamplerLCMUpscale": SamplerLCMUpscale,
}
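A small, self-contained sketch of the upscale schedule that sample_lcm_upscale builds (the sigma count and scale values are hypothetical examples, not node defaults): when an explicit step count is given, one extra point is added and the leading 1.0 is dropped, so the image grows toward total_upscale in equal increments during the first few denoising steps.

import numpy as np

# Hypothetical values: 10 sigmas, 2x total upscale, scale_steps=4 passed to the node.
sigmas_len = 10
total_upscale = 2.0
upscale_steps = 4 + 1                              # the function adds 1 before building the schedule
upscale_steps = min(upscale_steps, sigmas_len + 1)
upscales = np.linspace(1.0, total_upscale, upscale_steps)[1:]
print(upscales)                                    # [1.25 1.5  1.75 2.  ] -> applied on the first 4 denoise steps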
import numpy as np import torch def loglinear_interp(t_steps, num_steps): """ Performs log-linear interpolation of a given array of decreasing numbers. """ xs = np.linspace(0, 1, len(t_steps)) ys = np.log(t_steps[::-1]) new_xs = np.linspace(0, 1, num_steps) new_ys = np.interp(new_xs, xs, ys) interped_ys = np.exp(new_ys)[::-1].copy() return interped_ys NOISE_LEVELS = {"SD1": [14.6146412293, 6.4745760956, 3.8636745985, 2.6946151520, 1.8841921177, 1.3943805092, 0.9642583904, 0.6523686016, 0.3977456272, 0.1515232662, 0.0291671582], "SDXL":[14.6146412293, 6.3184485287, 3.7681790315, 2.1811480769, 1.3405244945, 0.8620721141, 0.5550693289, 0.3798540708, 0.2332364134, 0.1114188177, 0.0291671582], "SVD": [700.00, 54.5, 15.886, 7.977, 4.248, 1.789, 0.981, 0.403, 0.173, 0.034, 0.002]} class AlignYourStepsScheduler: @classmethod def INPUT_TYPES(s): return {"required": {"model_type": (["SD1", "SDXL", "SVD"], ), "steps": ("INT", {"default": 10, "min": 10, "max": 10000}), "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), } } RETURN_TYPES = ("SIGMAS",) CATEGORY = "sampling/custom_sampling/schedulers" FUNCTION = "get_sigmas" def get_sigmas(self, model_type, steps, denoise): total_steps = steps if denoise < 1.0: if denoise <= 0.0: return (torch.FloatTensor([]),) total_steps = round(steps * denoise) sigmas = NOISE_LEVELS[model_type][:] if (steps + 1) != len(sigmas): sigmas = loglinear_interp(sigmas, steps + 1) sigmas = sigmas[-(total_steps + 1):] sigmas[-1] = 0 return (torch.FloatTensor(sigmas), ) NODE_CLASS_MAPPINGS = { "AlignYourStepsScheduler": AlignYourStepsScheduler, }
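A hedged sketch of how AlignYourStepsScheduler stretches its 11 reference noise levels to an arbitrary step count; it assumes loglinear_interp and NOISE_LEVELS from the module above are in scope.

import torch

steps = 20
sigmas = NOISE_LEVELS["SDXL"][:]
if (steps + 1) != len(sigmas):
    sigmas = loglinear_interp(sigmas, steps + 1)   # linear interpolation in log(sigma)
sigmas[-1] = 0                                     # the schedule must end at sigma = 0
print(torch.FloatTensor(sigmas).shape)             # torch.Size([21])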
def attention_multiply(attn, model, q, k, v, out): m = model.clone() sd = model.model_state_dict() for key in sd: if key.endswith("{}.to_q.bias".format(attn)) or key.endswith("{}.to_q.weight".format(attn)): m.add_patches({key: (None,)}, 0.0, q) if key.endswith("{}.to_k.bias".format(attn)) or key.endswith("{}.to_k.weight".format(attn)): m.add_patches({key: (None,)}, 0.0, k) if key.endswith("{}.to_v.bias".format(attn)) or key.endswith("{}.to_v.weight".format(attn)): m.add_patches({key: (None,)}, 0.0, v) if key.endswith("{}.to_out.0.bias".format(attn)) or key.endswith("{}.to_out.0.weight".format(attn)): m.add_patches({key: (None,)}, 0.0, out) return m class UNetSelfAttentionMultiply: @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "q": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "k": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "v": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "out": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "_for_testing/attention_experiments" def patch(self, model, q, k, v, out): m = attention_multiply("attn1", model, q, k, v, out) return (m, ) class UNetCrossAttentionMultiply: @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "q": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "k": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "v": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "out": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "_for_testing/attention_experiments" def patch(self, model, q, k, v, out): m = attention_multiply("attn2", model, q, k, v, out) return (m, ) class CLIPAttentionMultiply: @classmethod def INPUT_TYPES(s): return {"required": { "clip": ("CLIP",), "q": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "k": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "v": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "out": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), }} RETURN_TYPES = ("CLIP",) FUNCTION = "patch" CATEGORY = "_for_testing/attention_experiments" def patch(self, clip, q, k, v, out): m = clip.clone() sd = m.patcher.model_state_dict() for key in sd: if key.endswith("self_attn.q_proj.weight") or key.endswith("self_attn.q_proj.bias"): m.add_patches({key: (None,)}, 0.0, q) if key.endswith("self_attn.k_proj.weight") or key.endswith("self_attn.k_proj.bias"): m.add_patches({key: (None,)}, 0.0, k) if key.endswith("self_attn.v_proj.weight") or key.endswith("self_attn.v_proj.bias"): m.add_patches({key: (None,)}, 0.0, v) if key.endswith("self_attn.out_proj.weight") or key.endswith("self_attn.out_proj.bias"): m.add_patches({key: (None,)}, 0.0, out) return (m, ) class UNetTemporalAttentionMultiply: @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "self_structural": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "self_temporal": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "cross_structural": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), "cross_temporal": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "_for_testing/attention_experiments" def patch(self, model, 
self_structural, self_temporal, cross_structural, cross_temporal): m = model.clone() sd = model.model_state_dict() for k in sd: if (k.endswith("attn1.to_out.0.bias") or k.endswith("attn1.to_out.0.weight")): if '.time_stack.' in k: m.add_patches({k: (None,)}, 0.0, self_temporal) else: m.add_patches({k: (None,)}, 0.0, self_structural) elif (k.endswith("attn2.to_out.0.bias") or k.endswith("attn2.to_out.0.weight")): if '.time_stack.' in k: m.add_patches({k: (None,)}, 0.0, cross_temporal) else: m.add_patches({k: (None,)}, 0.0, cross_structural) return (m, ) NODE_CLASS_MAPPINGS = { "UNetSelfAttentionMultiply": UNetSelfAttentionMultiply, "UNetCrossAttentionMultiply": UNetCrossAttentionMultiply, "CLIPAttentionMultiply": CLIPAttentionMultiply, "UNetTemporalAttentionMultiply": UNetTemporalAttentionMultiply, }
import torchaudio
import torch
import comfy.model_management
import folder_paths
import os
import hashlib  # needed by LoadAudio.IS_CHANGED below


class EmptyLatentAudio:
    def __init__(self):
        self.device = comfy.model_management.intermediate_device()

    @classmethod
    def INPUT_TYPES(s):
        return {"required": {}}

    RETURN_TYPES = ("LATENT",)
    FUNCTION = "generate"
    CATEGORY = "_for_testing/audio"

    def generate(self):
        batch_size = 1
        latent = torch.zeros([batch_size, 64, 1024], device=self.device)
        return ({"samples": latent, "type": "audio"}, )


class VAEEncodeAudio:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"audio": ("AUDIO", ), "vae": ("VAE", )}}

    RETURN_TYPES = ("LATENT",)
    FUNCTION = "encode"
    CATEGORY = "_for_testing/audio"

    def encode(self, vae, audio):
        t = vae.encode(audio["waveform"].movedim(1, -1))
        return ({"samples": t}, )


class VAEDecodeAudio:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"samples": ("LATENT", ), "vae": ("VAE", )}}

    RETURN_TYPES = ("AUDIO",)
    FUNCTION = "decode"
    CATEGORY = "_for_testing/audio"

    def decode(self, vae, samples):
        audio = vae.decode(samples["samples"]).movedim(-1, 1)
        return ({"waveform": audio, "sample_rate": 44100}, )


class SaveAudio:
    def __init__(self):
        self.output_dir = folder_paths.get_output_directory()
        self.type = "output"
        self.prefix_append = ""
        self.compress_level = 4

    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"audio": ("AUDIO", ),
                             "filename_prefix": ("STRING", {"default": "audio/ComfyUI"})},
                "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},
                }

    RETURN_TYPES = ()
    FUNCTION = "save_audio"
    OUTPUT_NODE = True
    CATEGORY = "_for_testing/audio"

    def save_audio(self, audio, filename_prefix="ComfyUI", prompt=None, extra_pnginfo=None):
        filename_prefix += self.prefix_append
        full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir)
        results = list()
        for (batch_number, waveform) in enumerate(audio["waveform"]):
            filename_with_batch_num = filename.replace("%batch_num%", str(batch_number))
            file = f"{filename_with_batch_num}_{counter:05}_.flac"
            torchaudio.save(os.path.join(full_output_folder, file), waveform, audio["sample_rate"], format="FLAC")
            results.append({
                "filename": file,
                "subfolder": subfolder,
                "type": self.type
            })
            counter += 1

        return {"ui": {"audio": results}}


class LoadAudio:
    @classmethod
    def INPUT_TYPES(s):
        input_dir = folder_paths.get_input_directory()
        files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f))]
        return {"required": {"audio": [sorted(files), ]}, }

    CATEGORY = "_for_testing/audio"
    RETURN_TYPES = ("AUDIO", )
    FUNCTION = "load"

    def load(self, audio):
        audio_path = folder_paths.get_annotated_filepath(audio)
        waveform, sample_rate = torchaudio.load(audio_path)
        audio = {"waveform": waveform.unsqueeze(0), "sample_rate": sample_rate}
        return (audio, )

    @classmethod
    def IS_CHANGED(s, audio):
        audio_path = folder_paths.get_annotated_filepath(audio)
        m = hashlib.sha256()
        with open(audio_path, 'rb') as f:
            m.update(f.read())
        return m.digest().hex()

    @classmethod
    def VALIDATE_INPUTS(s, audio):
        if not folder_paths.exists_annotated_filepath(audio):
            return "Invalid audio file: {}".format(audio)
        return True


NODE_CLASS_MAPPINGS = {
    "EmptyLatentAudio": EmptyLatentAudio,
    "VAEEncodeAudio": VAEEncodeAudio,
    "VAEDecodeAudio": VAEDecodeAudio,
    "SaveAudio": SaveAudio,
    "LoadAudio": LoadAudio,
}
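A brief sketch of the AUDIO value that flows between the nodes above: just a dict holding a [batch, channels, samples] waveform and its sample rate. The file path below is a placeholder.

import torchaudio

waveform, sample_rate = torchaudio.load("input.flac")                    # [channels, samples]
audio = {"waveform": waveform.unsqueeze(0), "sample_rate": sample_rate}  # add a batch dimension
print(audio["waveform"].shape, audio["sample_rate"])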
from kornia.filters import canny import comfy.model_management class Canny: @classmethod def INPUT_TYPES(s): return {"required": {"image": ("IMAGE",), "low_threshold": ("FLOAT", {"default": 0.4, "min": 0.01, "max": 0.99, "step": 0.01}), "high_threshold": ("FLOAT", {"default": 0.8, "min": 0.01, "max": 0.99, "step": 0.01}) }} RETURN_TYPES = ("IMAGE",) FUNCTION = "detect_edge" CATEGORY = "image/preprocessors" def detect_edge(self, image, low_threshold, high_threshold): output = canny(image.to(comfy.model_management.get_torch_device()).movedim(-1, 1), low_threshold, high_threshold) img_out = output[1].to(comfy.model_management.intermediate_device()).repeat(1, 3, 1, 1).movedim(1, -1) return (img_out,) NODE_CLASS_MAPPINGS = { "Canny": Canny, }
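A minimal sketch of the kornia call the Canny node wraps above, run on a random NCHW image; it only assumes kornia is installed.

import torch
from kornia.filters import canny

img = torch.rand(1, 3, 64, 64)                                        # NCHW, values in [0, 1]
magnitude, edges = canny(img, low_threshold=0.4, high_threshold=0.8)  # returns (magnitude, binary edges)
print(edges.shape)                                                    # torch.Size([1, 1, 64, 64])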
import torch
from nodes import MAX_RESOLUTION


class CLIPTextEncodeSDXLRefiner:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {
            "ascore": ("FLOAT", {"default": 6.0, "min": 0.0, "max": 1000.0, "step": 0.01}),
            "width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}),
            "height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}),
            "text": ("STRING", {"multiline": True, "dynamicPrompts": True}),
            "clip": ("CLIP", ),
            }}

    RETURN_TYPES = ("CONDITIONING",)
    FUNCTION = "encode"
    CATEGORY = "advanced/conditioning"

    def encode(self, clip, ascore, width, height, text):
        tokens = clip.tokenize(text)
        cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True)
        return ([[cond, {"pooled_output": pooled, "aesthetic_score": ascore, "width": width, "height": height}]], )


class CLIPTextEncodeSDXL:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {
            "width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}),
            "height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}),
            "crop_w": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION}),
            "crop_h": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION}),
            "target_width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}),
            "target_height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}),
            "text_g": ("STRING", {"multiline": True, "dynamicPrompts": True}),
            "text_l": ("STRING", {"multiline": True, "dynamicPrompts": True}),
            "clip": ("CLIP", ),
            }}

    RETURN_TYPES = ("CONDITIONING",)
    FUNCTION = "encode"
    CATEGORY = "advanced/conditioning"

    def encode(self, clip, width, height, crop_w, crop_h, target_width, target_height, text_g, text_l):
        tokens = clip.tokenize(text_g)
        tokens["l"] = clip.tokenize(text_l)["l"]
        if len(tokens["l"]) != len(tokens["g"]):
            empty = clip.tokenize("")
            while len(tokens["l"]) < len(tokens["g"]):
                tokens["l"] += empty["l"]
            while len(tokens["l"]) > len(tokens["g"]):
                tokens["g"] += empty["g"]
        cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True)
        return ([[cond, {"pooled_output": pooled, "width": width, "height": height, "crop_w": crop_w, "crop_h": crop_h, "target_width": target_width, "target_height": target_height}]], )


NODE_CLASS_MAPPINGS = {
    "CLIPTextEncodeSDXLRefiner": CLIPTextEncodeSDXLRefiner,
    "CLIPTextEncodeSDXL": CLIPTextEncodeSDXL,
}
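An illustration of the chunk-padding logic in CLIPTextEncodeSDXL.encode above: when text_g and text_l tokenize to different numbers of chunks, the shorter side is padded with empty-prompt chunks so both CLIP encoders see the same length. The lists below are hypothetical stand-ins for token chunks, not real CLIP tokens.

tokens = {"g": ["g_chunk_1", "g_chunk_2"], "l": ["l_chunk_1"]}
empty = {"g": ["g_empty"], "l": ["l_empty"]}

while len(tokens["l"]) < len(tokens["g"]):
    tokens["l"] += empty["l"]
while len(tokens["l"]) > len(tokens["g"]):
    tokens["g"] += empty["g"]

assert len(tokens["g"]) == len(tokens["l"]) == 2  # both sides padded to the same chunk count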
import numpy as np import torch import comfy.utils from enum import Enum def resize_mask(mask, shape): return torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(shape[0], shape[1]), mode="bilinear").squeeze(1) class PorterDuffMode(Enum): ADD = 0 CLEAR = 1 DARKEN = 2 DST = 3 DST_ATOP = 4 DST_IN = 5 DST_OUT = 6 DST_OVER = 7 LIGHTEN = 8 MULTIPLY = 9 OVERLAY = 10 SCREEN = 11 SRC = 12 SRC_ATOP = 13 SRC_IN = 14 SRC_OUT = 15 SRC_OVER = 16 XOR = 17 def porter_duff_composite(src_image: torch.Tensor, src_alpha: torch.Tensor, dst_image: torch.Tensor, dst_alpha: torch.Tensor, mode: PorterDuffMode): src_alpha = 1 - src_alpha dst_alpha = 1 - dst_alpha src_image = src_image * src_alpha dst_image = dst_image * dst_alpha if mode == PorterDuffMode.ADD: out_alpha = torch.clamp(src_alpha + dst_alpha, 0, 1) out_image = torch.clamp(src_image + dst_image, 0, 1) elif mode == PorterDuffMode.CLEAR: out_alpha = torch.zeros_like(dst_alpha) out_image = torch.zeros_like(dst_image) elif mode == PorterDuffMode.DARKEN: out_alpha = src_alpha + dst_alpha - src_alpha * dst_alpha out_image = (1 - dst_alpha) * src_image + (1 - src_alpha) * dst_image + torch.min(src_image, dst_image) elif mode == PorterDuffMode.DST: out_alpha = dst_alpha out_image = dst_image elif mode == PorterDuffMode.DST_ATOP: out_alpha = src_alpha out_image = src_alpha * dst_image + (1 - dst_alpha) * src_image elif mode == PorterDuffMode.DST_IN: out_alpha = src_alpha * dst_alpha out_image = dst_image * src_alpha elif mode == PorterDuffMode.DST_OUT: out_alpha = (1 - src_alpha) * dst_alpha out_image = (1 - src_alpha) * dst_image elif mode == PorterDuffMode.DST_OVER: out_alpha = dst_alpha + (1 - dst_alpha) * src_alpha out_image = dst_image + (1 - dst_alpha) * src_image elif mode == PorterDuffMode.LIGHTEN: out_alpha = src_alpha + dst_alpha - src_alpha * dst_alpha out_image = (1 - dst_alpha) * src_image + (1 - src_alpha) * dst_image + torch.max(src_image, dst_image) elif mode == PorterDuffMode.MULTIPLY: out_alpha = src_alpha * dst_alpha out_image = src_image * dst_image elif mode == PorterDuffMode.OVERLAY: out_alpha = src_alpha + dst_alpha - src_alpha * dst_alpha out_image = torch.where(2 * dst_image < dst_alpha, 2 * src_image * dst_image, src_alpha * dst_alpha - 2 * (dst_alpha - src_image) * (src_alpha - dst_image)) elif mode == PorterDuffMode.SCREEN: out_alpha = src_alpha + dst_alpha - src_alpha * dst_alpha out_image = src_image + dst_image - src_image * dst_image elif mode == PorterDuffMode.SRC: out_alpha = src_alpha out_image = src_image elif mode == PorterDuffMode.SRC_ATOP: out_alpha = dst_alpha out_image = dst_alpha * src_image + (1 - src_alpha) * dst_image elif mode == PorterDuffMode.SRC_IN: out_alpha = src_alpha * dst_alpha out_image = src_image * dst_alpha elif mode == PorterDuffMode.SRC_OUT: out_alpha = (1 - dst_alpha) * src_alpha out_image = (1 - dst_alpha) * src_image elif mode == PorterDuffMode.SRC_OVER: out_alpha = src_alpha + (1 - src_alpha) * dst_alpha out_image = src_image + (1 - src_alpha) * dst_image elif mode == PorterDuffMode.XOR: out_alpha = (1 - dst_alpha) * src_alpha + (1 - src_alpha) * dst_alpha out_image = (1 - dst_alpha) * src_image + (1 - src_alpha) * dst_image else: return None, None out_image = torch.where(out_alpha > 1e-5, out_image / out_alpha, torch.zeros_like(out_image)) out_image = torch.clamp(out_image, 0, 1) out_alpha = 1 - out_alpha return out_image, out_alpha class PorterDuffImageComposite: @classmethod def INPUT_TYPES(s): return { "required": { "source": ("IMAGE",), "source_alpha": 
("MASK",), "destination": ("IMAGE",), "destination_alpha": ("MASK",), "mode": ([mode.name for mode in PorterDuffMode], {"default": PorterDuffMode.DST.name}), }, } RETURN_TYPES = ("IMAGE", "MASK") FUNCTION = "composite" CATEGORY = "mask/compositing" def composite(self, source: torch.Tensor, source_alpha: torch.Tensor, destination: torch.Tensor, destination_alpha: torch.Tensor, mode): batch_size = min(len(source), len(source_alpha), len(destination), len(destination_alpha)) out_images = [] out_alphas = [] for i in range(batch_size): src_image = source[i] dst_image = destination[i] assert src_image.shape[2] == dst_image.shape[2] src_alpha = source_alpha[i].unsqueeze(2) dst_alpha = destination_alpha[i].unsqueeze(2) if dst_alpha.shape[:2] != dst_image.shape[:2]: upscale_input = dst_alpha.unsqueeze(0).permute(0, 3, 1, 2) upscale_output = comfy.utils.common_upscale(upscale_input, dst_image.shape[1], dst_image.shape[0], upscale_method='bicubic', crop='center') dst_alpha = upscale_output.permute(0, 2, 3, 1).squeeze(0) if src_image.shape != dst_image.shape: upscale_input = src_image.unsqueeze(0).permute(0, 3, 1, 2) upscale_output = comfy.utils.common_upscale(upscale_input, dst_image.shape[1], dst_image.shape[0], upscale_method='bicubic', crop='center') src_image = upscale_output.permute(0, 2, 3, 1).squeeze(0) if src_alpha.shape != dst_alpha.shape: upscale_input = src_alpha.unsqueeze(0).permute(0, 3, 1, 2) upscale_output = comfy.utils.common_upscale(upscale_input, dst_alpha.shape[1], dst_alpha.shape[0], upscale_method='bicubic', crop='center') src_alpha = upscale_output.permute(0, 2, 3, 1).squeeze(0) out_image, out_alpha = porter_duff_composite(src_image, src_alpha, dst_image, dst_alpha, PorterDuffMode[mode]) out_images.append(out_image) out_alphas.append(out_alpha.squeeze(2)) result = (torch.stack(out_images), torch.stack(out_alphas)) return result class SplitImageWithAlpha: @classmethod def INPUT_TYPES(s): return { "required": { "image": ("IMAGE",), } } CATEGORY = "mask/compositing" RETURN_TYPES = ("IMAGE", "MASK") FUNCTION = "split_image_with_alpha" def split_image_with_alpha(self, image: torch.Tensor): out_images = [i[:,:,:3] for i in image] out_alphas = [i[:,:,3] if i.shape[2] > 3 else torch.ones_like(i[:,:,0]) for i in image] result = (torch.stack(out_images), 1.0 - torch.stack(out_alphas)) return result class JoinImageWithAlpha: @classmethod def INPUT_TYPES(s): return { "required": { "image": ("IMAGE",), "alpha": ("MASK",), } } CATEGORY = "mask/compositing" RETURN_TYPES = ("IMAGE",) FUNCTION = "join_image_with_alpha" def join_image_with_alpha(self, image: torch.Tensor, alpha: torch.Tensor): batch_size = min(len(image), len(alpha)) out_images = [] alpha = 1.0 - resize_mask(alpha, image.shape[1:]) for i in range(batch_size): out_images.append(torch.cat((image[i][:,:,:3], alpha[i].unsqueeze(2)), dim=2)) result = (torch.stack(out_images),) return result NODE_CLASS_MAPPINGS = { "PorterDuffImageComposite": PorterDuffImageComposite, "SplitImageWithAlpha": SplitImageWithAlpha, "JoinImageWithAlpha": JoinImageWithAlpha, } NODE_DISPLAY_NAME_MAPPINGS = { "PorterDuffImageComposite": "Porter-Duff Image Composite", "SplitImageWithAlpha": "Split Image with Alpha", "JoinImageWithAlpha": "Join Image with Alpha", }
class CLIPTextEncodeControlnet: @classmethod def INPUT_TYPES(s): return {"required": {"clip": ("CLIP", ), "conditioning": ("CONDITIONING", ), "text": ("STRING", {"multiline": True, "dynamicPrompts": True})}} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "encode" CATEGORY = "_for_testing/conditioning" def encode(self, clip, conditioning, text): tokens = clip.tokenize(text) cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) c = [] for t in conditioning: n = [t[0], t[1].copy()] n[1]['cross_attn_controlnet'] = cond n[1]['pooled_output_controlnet'] = pooled c.append(n) return (c, ) NODE_CLASS_MAPPINGS = { "CLIPTextEncodeControlnet": CLIPTextEncodeControlnet }
import comfy.samplers import comfy.sample from comfy.k_diffusion import sampling as k_diffusion_sampling import latent_preview import torch import comfy.utils import node_helpers class BasicScheduler: @classmethod def INPUT_TYPES(s): return {"required": {"model": ("MODEL",), "scheduler": (comfy.samplers.SCHEDULER_NAMES, ), "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), } } RETURN_TYPES = ("SIGMAS",) CATEGORY = "sampling/custom_sampling/schedulers" FUNCTION = "get_sigmas" def get_sigmas(self, model, scheduler, steps, denoise): total_steps = steps if denoise < 1.0: if denoise <= 0.0: return (torch.FloatTensor([]),) total_steps = int(steps/denoise) sigmas = comfy.samplers.calculate_sigmas(model.get_model_object("model_sampling"), scheduler, total_steps).cpu() sigmas = sigmas[-(steps + 1):] return (sigmas, ) class KarrasScheduler: @classmethod def INPUT_TYPES(s): return {"required": {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), "rho": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), } } RETURN_TYPES = ("SIGMAS",) CATEGORY = "sampling/custom_sampling/schedulers" FUNCTION = "get_sigmas" def get_sigmas(self, steps, sigma_max, sigma_min, rho): sigmas = k_diffusion_sampling.get_sigmas_karras(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, rho=rho) return (sigmas, ) class ExponentialScheduler: @classmethod def INPUT_TYPES(s): return {"required": {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), } } RETURN_TYPES = ("SIGMAS",) CATEGORY = "sampling/custom_sampling/schedulers" FUNCTION = "get_sigmas" def get_sigmas(self, steps, sigma_max, sigma_min): sigmas = k_diffusion_sampling.get_sigmas_exponential(n=steps, sigma_min=sigma_min, sigma_max=sigma_max) return (sigmas, ) class PolyexponentialScheduler: @classmethod def INPUT_TYPES(s): return {"required": {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), "rho": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), } } RETURN_TYPES = ("SIGMAS",) CATEGORY = "sampling/custom_sampling/schedulers" FUNCTION = "get_sigmas" def get_sigmas(self, steps, sigma_max, sigma_min, rho): sigmas = k_diffusion_sampling.get_sigmas_polyexponential(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, rho=rho) return (sigmas, ) class SDTurboScheduler: @classmethod def INPUT_TYPES(s): return {"required": {"model": ("MODEL",), "steps": ("INT", {"default": 1, "min": 1, "max": 10}), "denoise": ("FLOAT", {"default": 1.0, "min": 0, "max": 1.0, "step": 0.01}), } } RETURN_TYPES = ("SIGMAS",) CATEGORY = "sampling/custom_sampling/schedulers" FUNCTION = "get_sigmas" def get_sigmas(self, model, steps, denoise): start_step = 10 - int(10 * denoise) timesteps = torch.flip(torch.arange(1, 11) * 100 - 1, (0,))[start_step:start_step + steps] sigmas = 
model.get_model_object("model_sampling").sigma(timesteps) sigmas = torch.cat([sigmas, sigmas.new_zeros([1])]) return (sigmas, ) class VPScheduler: @classmethod def INPUT_TYPES(s): return {"required": {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "beta_d": ("FLOAT", {"default": 19.9, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), "beta_min": ("FLOAT", {"default": 0.1, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), "eps_s": ("FLOAT", {"default": 0.001, "min": 0.0, "max": 1.0, "step":0.0001, "round": False}), } } RETURN_TYPES = ("SIGMAS",) CATEGORY = "sampling/custom_sampling/schedulers" FUNCTION = "get_sigmas" def get_sigmas(self, steps, beta_d, beta_min, eps_s): sigmas = k_diffusion_sampling.get_sigmas_vp(n=steps, beta_d=beta_d, beta_min=beta_min, eps_s=eps_s) return (sigmas, ) class SplitSigmas: @classmethod def INPUT_TYPES(s): return {"required": {"sigmas": ("SIGMAS", ), "step": ("INT", {"default": 0, "min": 0, "max": 10000}), } } RETURN_TYPES = ("SIGMAS","SIGMAS") RETURN_NAMES = ("high_sigmas", "low_sigmas") CATEGORY = "sampling/custom_sampling/sigmas" FUNCTION = "get_sigmas" def get_sigmas(self, sigmas, step): sigmas1 = sigmas[:step + 1] sigmas2 = sigmas[step:] return (sigmas1, sigmas2) class SplitSigmasDenoise: @classmethod def INPUT_TYPES(s): return {"required": {"sigmas": ("SIGMAS", ), "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), } } RETURN_TYPES = ("SIGMAS","SIGMAS") RETURN_NAMES = ("high_sigmas", "low_sigmas") CATEGORY = "sampling/custom_sampling/sigmas" FUNCTION = "get_sigmas" def get_sigmas(self, sigmas, denoise): steps = max(sigmas.shape[-1] - 1, 0) total_steps = round(steps * denoise) sigmas1 = sigmas[:-(total_steps)] sigmas2 = sigmas[-(total_steps + 1):] return (sigmas1, sigmas2) class FlipSigmas: @classmethod def INPUT_TYPES(s): return {"required": {"sigmas": ("SIGMAS", ), } } RETURN_TYPES = ("SIGMAS",) CATEGORY = "sampling/custom_sampling/sigmas" FUNCTION = "get_sigmas" def get_sigmas(self, sigmas): if len(sigmas) == 0: return (sigmas,) sigmas = sigmas.flip(0) if sigmas[0] == 0: sigmas[0] = 0.0001 return (sigmas,) class KSamplerSelect: @classmethod def INPUT_TYPES(s): return {"required": {"sampler_name": (comfy.samplers.SAMPLER_NAMES, ), } } RETURN_TYPES = ("SAMPLER",) CATEGORY = "sampling/custom_sampling/samplers" FUNCTION = "get_sampler" def get_sampler(self, sampler_name): sampler = comfy.samplers.sampler_object(sampler_name) return (sampler, ) class SamplerDPMPP_3M_SDE: @classmethod def INPUT_TYPES(s): return {"required": {"eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "noise_device": (['gpu', 'cpu'], ), } } RETURN_TYPES = ("SAMPLER",) CATEGORY = "sampling/custom_sampling/samplers" FUNCTION = "get_sampler" def get_sampler(self, eta, s_noise, noise_device): if noise_device == 'cpu': sampler_name = "dpmpp_3m_sde" else: sampler_name = "dpmpp_3m_sde_gpu" sampler = comfy.samplers.ksampler(sampler_name, {"eta": eta, "s_noise": s_noise}) return (sampler, ) class SamplerDPMPP_2M_SDE: @classmethod def INPUT_TYPES(s): return {"required": {"solver_type": (['midpoint', 'heun'], ), "eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "noise_device": (['gpu', 'cpu'], ), } } RETURN_TYPES = ("SAMPLER",) CATEGORY = "sampling/custom_sampling/samplers" 
FUNCTION = "get_sampler" def get_sampler(self, solver_type, eta, s_noise, noise_device): if noise_device == 'cpu': sampler_name = "dpmpp_2m_sde" else: sampler_name = "dpmpp_2m_sde_gpu" sampler = comfy.samplers.ksampler(sampler_name, {"eta": eta, "s_noise": s_noise, "solver_type": solver_type}) return (sampler, ) class SamplerDPMPP_SDE: @classmethod def INPUT_TYPES(s): return {"required": {"eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "r": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "noise_device": (['gpu', 'cpu'], ), } } RETURN_TYPES = ("SAMPLER",) CATEGORY = "sampling/custom_sampling/samplers" FUNCTION = "get_sampler" def get_sampler(self, eta, s_noise, r, noise_device): if noise_device == 'cpu': sampler_name = "dpmpp_sde" else: sampler_name = "dpmpp_sde_gpu" sampler = comfy.samplers.ksampler(sampler_name, {"eta": eta, "s_noise": s_noise, "r": r}) return (sampler, ) class SamplerEulerAncestral: @classmethod def INPUT_TYPES(s): return {"required": {"eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), } } RETURN_TYPES = ("SAMPLER",) CATEGORY = "sampling/custom_sampling/samplers" FUNCTION = "get_sampler" def get_sampler(self, eta, s_noise): sampler = comfy.samplers.ksampler("euler_ancestral", {"eta": eta, "s_noise": s_noise}) return (sampler, ) class SamplerLMS: @classmethod def INPUT_TYPES(s): return {"required": {"order": ("INT", {"default": 4, "min": 1, "max": 100}), } } RETURN_TYPES = ("SAMPLER",) CATEGORY = "sampling/custom_sampling/samplers" FUNCTION = "get_sampler" def get_sampler(self, order): sampler = comfy.samplers.ksampler("lms", {"order": order}) return (sampler, ) class SamplerDPMAdaptative: @classmethod def INPUT_TYPES(s): return {"required": {"order": ("INT", {"default": 3, "min": 2, "max": 3}), "rtol": ("FLOAT", {"default": 0.05, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "atol": ("FLOAT", {"default": 0.0078, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "h_init": ("FLOAT", {"default": 0.05, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "pcoeff": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "icoeff": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "dcoeff": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "accept_safety": ("FLOAT", {"default": 0.81, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "eta": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), } } RETURN_TYPES = ("SAMPLER",) CATEGORY = "sampling/custom_sampling/samplers" FUNCTION = "get_sampler" def get_sampler(self, order, rtol, atol, h_init, pcoeff, icoeff, dcoeff, accept_safety, eta, s_noise): sampler = comfy.samplers.ksampler("dpm_adaptive", {"order": order, "rtol": rtol, "atol": atol, "h_init": h_init, "pcoeff": pcoeff, "icoeff": icoeff, "dcoeff": dcoeff, "accept_safety": accept_safety, "eta": eta, "s_noise":s_noise }) return (sampler, ) class Noise_EmptyNoise: def __init__(self): self.seed = 0 def generate_noise(self, input_latent): latent_image = input_latent["samples"] return torch.zeros(latent_image.shape, 
dtype=latent_image.dtype, layout=latent_image.layout, device="cpu") class Noise_RandomNoise: def __init__(self, seed): self.seed = seed def generate_noise(self, input_latent): latent_image = input_latent["samples"] batch_inds = input_latent["batch_index"] if "batch_index" in input_latent else None return comfy.sample.prepare_noise(latent_image, self.seed, batch_inds) class SamplerCustom: @classmethod def INPUT_TYPES(s): return {"required": {"model": ("MODEL",), "add_noise": ("BOOLEAN", {"default": True}), "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), "positive": ("CONDITIONING", ), "negative": ("CONDITIONING", ), "sampler": ("SAMPLER", ), "sigmas": ("SIGMAS", ), "latent_image": ("LATENT", ), } } RETURN_TYPES = ("LATENT","LATENT") RETURN_NAMES = ("output", "denoised_output") FUNCTION = "sample" CATEGORY = "sampling/custom_sampling" def sample(self, model, add_noise, noise_seed, cfg, positive, negative, sampler, sigmas, latent_image): latent = latent_image latent_image = latent["samples"] latent = latent.copy() latent_image = comfy.sample.fix_empty_latent_channels(model, latent_image) latent["samples"] = latent_image if not add_noise: noise = Noise_EmptyNoise().generate_noise(latent) else: noise = Noise_RandomNoise(noise_seed).generate_noise(latent) noise_mask = None if "noise_mask" in latent: noise_mask = latent["noise_mask"] x0_output = {} callback = latent_preview.prepare_callback(model, sigmas.shape[-1] - 1, x0_output) disable_pbar = not comfy.utils.PROGRESS_BAR_ENABLED samples = comfy.sample.sample_custom(model, noise, cfg, sampler, sigmas, positive, negative, latent_image, noise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=noise_seed) out = latent.copy() out["samples"] = samples if "x0" in x0_output: out_denoised = latent.copy() out_denoised["samples"] = model.model.process_latent_out(x0_output["x0"].cpu()) else: out_denoised = out return (out, out_denoised) class Guider_Basic(comfy.samplers.CFGGuider): def set_conds(self, positive): self.inner_set_conds({"positive": positive}) class BasicGuider: @classmethod def INPUT_TYPES(s): return {"required": {"model": ("MODEL",), "conditioning": ("CONDITIONING", ), } } RETURN_TYPES = ("GUIDER",) FUNCTION = "get_guider" CATEGORY = "sampling/custom_sampling/guiders" def get_guider(self, model, conditioning): guider = Guider_Basic(model) guider.set_conds(conditioning) return (guider,) class CFGGuider: @classmethod def INPUT_TYPES(s): return {"required": {"model": ("MODEL",), "positive": ("CONDITIONING", ), "negative": ("CONDITIONING", ), "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), } } RETURN_TYPES = ("GUIDER",) FUNCTION = "get_guider" CATEGORY = "sampling/custom_sampling/guiders" def get_guider(self, model, positive, negative, cfg): guider = comfy.samplers.CFGGuider(model) guider.set_conds(positive, negative) guider.set_cfg(cfg) return (guider,) class Guider_DualCFG(comfy.samplers.CFGGuider): def set_cfg(self, cfg1, cfg2): self.cfg1 = cfg1 self.cfg2 = cfg2 def set_conds(self, positive, middle, negative): middle = node_helpers.conditioning_set_values(middle, {"prompt_type": "negative"}) self.inner_set_conds({"positive": positive, "middle": middle, "negative": negative}) def predict_noise(self, x, timestep, model_options={}, seed=None): negative_cond = self.conds.get("negative", None) middle_cond = self.conds.get("middle", None) out = 
comfy.samplers.calc_cond_batch(self.inner_model, [negative_cond, middle_cond, self.conds.get("positive", None)], x, timestep, model_options) return comfy.samplers.cfg_function(self.inner_model, out[1], out[0], self.cfg2, x, timestep, model_options=model_options, cond=middle_cond, uncond=negative_cond) + (out[2] - out[1]) * self.cfg1 class DualCFGGuider: @classmethod def INPUT_TYPES(s): return {"required": {"model": ("MODEL",), "cond1": ("CONDITIONING", ), "cond2": ("CONDITIONING", ), "negative": ("CONDITIONING", ), "cfg_conds": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), "cfg_cond2_negative": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), } } RETURN_TYPES = ("GUIDER",) FUNCTION = "get_guider" CATEGORY = "sampling/custom_sampling/guiders" def get_guider(self, model, cond1, cond2, negative, cfg_conds, cfg_cond2_negative): guider = Guider_DualCFG(model) guider.set_conds(cond1, cond2, negative) guider.set_cfg(cfg_conds, cfg_cond2_negative) return (guider,) class DisableNoise: @classmethod def INPUT_TYPES(s): return {"required":{ } } RETURN_TYPES = ("NOISE",) FUNCTION = "get_noise" CATEGORY = "sampling/custom_sampling/noise" def get_noise(self): return (Noise_EmptyNoise(),) class RandomNoise(DisableNoise): @classmethod def INPUT_TYPES(s): return {"required":{ "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), } } def get_noise(self, noise_seed): return (Noise_RandomNoise(noise_seed),) class SamplerCustomAdvanced: @classmethod def INPUT_TYPES(s): return {"required": {"noise": ("NOISE", ), "guider": ("GUIDER", ), "sampler": ("SAMPLER", ), "sigmas": ("SIGMAS", ), "latent_image": ("LATENT", ), } } RETURN_TYPES = ("LATENT","LATENT") RETURN_NAMES = ("output", "denoised_output") FUNCTION = "sample" CATEGORY = "sampling/custom_sampling" def sample(self, noise, guider, sampler, sigmas, latent_image): latent = latent_image latent_image = latent["samples"] latent = latent.copy() latent_image = comfy.sample.fix_empty_latent_channels(guider.model_patcher, latent_image) latent["samples"] = latent_image noise_mask = None if "noise_mask" in latent: noise_mask = latent["noise_mask"] x0_output = {} callback = latent_preview.prepare_callback(guider.model_patcher, sigmas.shape[-1] - 1, x0_output) disable_pbar = not comfy.utils.PROGRESS_BAR_ENABLED samples = guider.sample(noise.generate_noise(latent), latent_image, sampler, sigmas, denoise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=noise.seed) samples = samples.to(comfy.model_management.intermediate_device()) out = latent.copy() out["samples"] = samples if "x0" in x0_output: out_denoised = latent.copy() out_denoised["samples"] = guider.model_patcher.model.process_latent_out(x0_output["x0"].cpu()) else: out_denoised = out return (out, out_denoised) class AddNoise: @classmethod def INPUT_TYPES(s): return {"required": {"model": ("MODEL",), "noise": ("NOISE", ), "sigmas": ("SIGMAS", ), "latent_image": ("LATENT", ), } } RETURN_TYPES = ("LATENT",) FUNCTION = "add_noise" CATEGORY = "_for_testing/custom_sampling/noise" def add_noise(self, model, noise, sigmas, latent_image): if len(sigmas) == 0: return latent_image latent = latent_image latent_image = latent["samples"] noisy = noise.generate_noise(latent) model_sampling = model.get_model_object("model_sampling") process_latent_out = model.get_model_object("process_latent_out") process_latent_in = model.get_model_object("process_latent_in") if len(sigmas) > 1: scale = torch.abs(sigmas[0] - sigmas[-1]) 
else: scale = sigmas[0] if torch.count_nonzero(latent_image) > 0: latent_image = process_latent_in(latent_image) noisy = model_sampling.noise_scaling(scale, noisy, latent_image) noisy = process_latent_out(noisy) noisy = torch.nan_to_num(noisy, nan=0.0, posinf=0.0, neginf=0.0) out = latent.copy() out["samples"] = noisy return (out,) NODE_CLASS_MAPPINGS = { "SamplerCustom": SamplerCustom, "BasicScheduler": BasicScheduler, "KarrasScheduler": KarrasScheduler, "ExponentialScheduler": ExponentialScheduler, "PolyexponentialScheduler": PolyexponentialScheduler, "VPScheduler": VPScheduler, "SDTurboScheduler": SDTurboScheduler, "KSamplerSelect": KSamplerSelect, "SamplerEulerAncestral": SamplerEulerAncestral, "SamplerLMS": SamplerLMS, "SamplerDPMPP_3M_SDE": SamplerDPMPP_3M_SDE, "SamplerDPMPP_2M_SDE": SamplerDPMPP_2M_SDE, "SamplerDPMPP_SDE": SamplerDPMPP_SDE, "SamplerDPMAdaptative": SamplerDPMAdaptative, "SplitSigmas": SplitSigmas, "SplitSigmasDenoise": SplitSigmasDenoise, "FlipSigmas": FlipSigmas, "CFGGuider": CFGGuider, "DualCFGGuider": DualCFGGuider, "BasicGuider": BasicGuider, "RandomNoise": RandomNoise, "DisableNoise": DisableNoise, "AddNoise": AddNoise, "SamplerCustomAdvanced": SamplerCustomAdvanced, }
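A hedged, model-free sketch of two building blocks from the file above: the sigma-manipulation nodes operate on plain 1-D tensors, and the NOISE objects returned by RandomNoise / DisableNoise are small Python objects with a generate_noise(latent_dict) method. It assumes the classes above and the comfy package are importable.

import torch

# SplitSigmas at step=3: both halves share the boundary sigma.
sigmas = torch.tensor([14.6, 8.0, 4.0, 2.0, 1.0, 0.5, 0.0])
high, low = sigmas[:3 + 1], sigmas[3:]
print(high)   # tensor([14.6000,  8.0000,  4.0000,  2.0000])
print(low)    # tensor([2.0000, 1.0000, 0.5000, 0.0000])

# FlipSigmas: reverse the schedule and nudge the leading zero so samplers never divide by zero.
flipped = sigmas.flip(0)
if flipped[0] == 0:
    flipped[0] = 0.0001

# NOISE objects: deterministic noise for a given seed, or all zeros when noise is disabled.
latent = {"samples": torch.zeros(1, 4, 64, 64)}
noise = Noise_RandomNoise(42).generate_noise(latent)   # CPU noise, reproducible for seed 42
empty = Noise_EmptyNoise().generate_noise(latent)      # zeros, used when add_noise is off
print(noise.shape, float(empty.abs().max()))           # torch.Size([1, 4, 64, 64]) 0.0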